<?xml version="1.0" encoding="UTF-8"?><rss version="2.0"
	xmlns:content="http://purl.org/rss/1.0/modules/content/"
	xmlns:wfw="http://wellformedweb.org/CommentAPI/"
	xmlns:dc="http://purl.org/dc/elements/1.1/"
	xmlns:atom="http://www.w3.org/2005/Atom"
	xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
	xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
	
	xmlns:georss="http://www.georss.org/georss"
	xmlns:geo="http://www.w3.org/2003/01/geo/wgs84_pos#"
	>

<channel>
	<title>Autonomous Mobile Robots | Kudan global</title>
	<atom:link href="https://www.kudan.io/blog/tag/autonomous-mobile-robots/feed/" rel="self" type="application/rss+xml" />
	<link>https://www.kudan.io</link>
	<description>Kudan has been providing proprietary Artificial Perception technologies based on SLAM to enable use cases with significant market potential and impact on our lives such as autonomous driving, robotics, AR/VR and smart cities</description>
	<lastBuildDate>Tue, 17 Mar 2026 03:03:37 +0000</lastBuildDate>
	<language>en-US</language>
	<sy:updatePeriod>
	hourly	</sy:updatePeriod>
	<sy:updateFrequency>
	1	</sy:updateFrequency>
	<generator>https://wordpress.org/?v=5.8.13</generator>

<image>
	<url>https://i0.wp.com/www.kudan.io/wp-content/uploads/2020/05/cropped-NoImage.png?fit=32%2C32&#038;ssl=1</url>
	<title>Autonomous Mobile Robots | Kudan global</title>
	<link>https://www.kudan.io</link>
	<width>32</width>
	<height>32</height>
</image> 
<site xmlns="com-wordpress:feed-additions:1">179852210</site>	<item>
		<title>Vecow Launches Turn-Key AMR Development Kit Integrating Kudan’s Spatial Perception Technology to Advance Physical AI in Robotics</title>
		<link>https://www.kudan.io/blog/vecow-launches-turn-key-amr-development-kit-integrating-kudans-spatial-perception-technology/?utm_source=rss&#038;utm_medium=rss&#038;utm_campaign=vecow-launches-turn-key-amr-development-kit-integrating-kudans-spatial-perception-technology</link>
		
		<dc:creator><![CDATA[user]]></dc:creator>
		<pubDate>Tue, 17 Mar 2026 03:00:20 +0000</pubDate>
				<category><![CDATA[Press Release]]></category>
		<category><![CDATA[AI robotics research]]></category>
		<category><![CDATA[AI-driven autonomy]]></category>
		<category><![CDATA[AI-enabled machines]]></category>
		<category><![CDATA[algorithm validation]]></category>
		<category><![CDATA[AMR development kit]]></category>
		<category><![CDATA[AMR navigation]]></category>
		<category><![CDATA[Autonomous Mobile Robot]]></category>
		<category><![CDATA[Autonomous Mobile Robots]]></category>
		<category><![CDATA[autonomous mobility technology]]></category>
		<category><![CDATA[Autonomous navigation]]></category>
		<category><![CDATA[autonomous robotics]]></category>
		<category><![CDATA[commercial robotics]]></category>
		<category><![CDATA[Dynamic Environments]]></category>
		<category><![CDATA[edge AI computing]]></category>
		<category><![CDATA[embedded computing]]></category>
		<category><![CDATA[full-stack robotics platform]]></category>
		<category><![CDATA[industrial edge computing]]></category>
		<category><![CDATA[industrial inspection]]></category>
		<category><![CDATA[industrial robotics]]></category>
		<category><![CDATA[intelligent machines]]></category>
		<category><![CDATA[Japan IT Week Spring 2026]]></category>
		<category><![CDATA[Kudan]]></category>
		<category><![CDATA[Kudan navigation technology]]></category>
		<category><![CDATA[Kudan spatial perception]]></category>
		<category><![CDATA[LLM robotics]]></category>
		<category><![CDATA[Localization and Mapping]]></category>
		<category><![CDATA[Logistics automation]]></category>
		<category><![CDATA[modular robotics platform]]></category>
		<category><![CDATA[multimodal perception]]></category>
		<category><![CDATA[natural language interaction]]></category>
		<category><![CDATA[navigation algorithms]]></category>
		<category><![CDATA[next-generation robotics]]></category>
		<category><![CDATA[NVIDIA]]></category>
		<category><![CDATA[NVIDIA GTC 2026]]></category>
		<category><![CDATA[perception sensors]]></category>
		<category><![CDATA[Physical AI]]></category>
		<category><![CDATA[physical world AI]]></category>
		<category><![CDATA[rapid prototyping robotics]]></category>
		<category><![CDATA[real-world robot testing]]></category>
		<category><![CDATA[robot localization]]></category>
		<category><![CDATA[robot mapping]]></category>
		<category><![CDATA[robot navigation]]></category>
		<category><![CDATA[robotics AI]]></category>
		<category><![CDATA[robotics development environment]]></category>
		<category><![CDATA[robotics development platform]]></category>
		<category><![CDATA[robotics ecosystem]]></category>
		<category><![CDATA[robotics innovation]]></category>
		<category><![CDATA[robotics integration]]></category>
		<category><![CDATA[robotics live demonstration]]></category>
		<category><![CDATA[robotics semantic perception]]></category>
		<category><![CDATA[robotics system development]]></category>
		<category><![CDATA[scalable robotics architecture]]></category>
		<category><![CDATA[semantic AI]]></category>
		<category><![CDATA[Semantic Understanding]]></category>
		<category><![CDATA[smart infrastructure]]></category>
		<category><![CDATA[Spatial Perception]]></category>
		<category><![CDATA[turn-key robotics platform]]></category>
		<category><![CDATA[Vecow]]></category>
		<category><![CDATA[Vecow edge AI]]></category>
		<category><![CDATA[VTK AMR Dev Kit]]></category>
		<guid isPermaLink="false">https://www.kudan.io/?p=2178</guid>

					<description><![CDATA[<p>– The advanced robotics platform combines edge AI computing, spatial perception, navigation, and semantic AI capabilities to accelerate Physical AI in next-generation autonomous machines – Kudan Inc. (hereinafter “Kudan”) today announced that its partner, Vecow Co., Ltd. (hereinafter “Vecow”), a global leader in embedded computing and edge AI solutions, has officially launched the VTK AMR [&#8230;]</p>
<p>The post <a href="https://www.kudan.io/blog/vecow-launches-turn-key-amr-development-kit-integrating-kudans-spatial-perception-technology/">Vecow Launches Turn-Key AMR Development Kit Integrating Kudan’s Spatial Perception Technology to Advance Physical AI in Robotics</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></description>
										<content:encoded><![CDATA[<h1><em><strong>– The advanced robotics platform combines edge AI computing, spatial perception, navigation, and semantic AI capabilities to accelerate Physical AI in next-generation autonomous machines –</strong></em></h1>
<p>Kudan Inc. (hereinafter “Kudan”) today announced that its partner, Vecow Co., Ltd. (hereinafter “Vecow”), a global leader in embedded computing and edge AI solutions, has officially launched the VTK AMR Dev Kit, a turn-key development platform for Autonomous Mobile Robots (AMRs) that integrates Kudan’s Spatial Perception and Robot Navigation technology.</p>
<p><img loading="lazy" class="aligncenter size-full wp-image-2182" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2026/03/画像3.png?resize=1255%2C703" alt="" width="1255" height="703" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2026/03/画像3.png?w=1255&amp;ssl=1 1255w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2026/03/画像3.png?resize=300%2C168&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2026/03/画像3.png?resize=1024%2C574&amp;ssl=1 1024w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2026/03/画像3.png?resize=768%2C430&amp;ssl=1 768w" sizes="(max-width: 1000px) 100vw, 1000px" data-recalc-dims="1" /></p>
<h2><strong>■ A Pre-Integrated Platform for Next-Generation Autonomous Robotics</strong></h2>
<p>The VTK AMR Dev Kit (EDR-1000) combines advanced edge computing, multimodal perception, and AI-driven autonomy into a unified robotics development environment. It is specifically designed to accelerate the realization of Physical AI, a new generation of intelligent machines capable of understanding and interacting with the physical world.</p>
<p>Traditionally, developing autonomous mobile robots requires integrating numerous complex subsystems, including perception sensors, computing hardware, and navigation algorithms. These integration challenges often slow development cycles and increase engineering complexity.</p>
<p>The VTK AMR Dev Kit addresses these hurdles by providing a pre-integrated, full-stack robotics development platform that significantly simplifies system integration and accelerates:</p>
<p>Rapid Prototyping: Streamlining the path from concept to functional robot.<br />
Algorithm Validation: Enabling robust testing in real-world environments.<br />
System Development: Offering a scalable, modular architecture for AI-enabled machines.</p>
<h2><strong>Product Introduction Video</strong></h2>
<p><iframe loading="lazy" title="Introduction of Vecow VTK AMR Dev Kit (EDR-1000), integrating Kudan’s Spatial Perception Technology" width="500" height="281" src="https://www.youtube.com/embed/jeS6a0a9m5w?feature=oembed" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share" referrerpolicy="strict-origin-when-cross-origin" allowfullscreen></iframe></p>
<h2><strong>Product Introduction Site by Vecow</strong></h2>
<p><a href="https://www.vecow.com/dispPageBox/vecow/VecowCT.aspx?ddsPageID=PRODUCTDTL_EN&amp;dbid=5357113499" target="_blank" rel="noopener">https://www.vecow.com/dispPageBox/vecow/VecowCT.aspx?ddsPageID=PRODUCTDTL_EN&amp;dbid=5357113499</a></p>
<h2><strong>■ Core Technologies and Strategic Collaboration</strong></h2>
<p>Through the integration of Kudan’s core technology, the platform enables robots to reliably localize, map, perceive, and navigate complex, dynamic environments—forming a critical foundation for industrial and commercial operations.</p>
<p>Furthermore, in collaboration with NVIDIA, the platform enables advanced AI capabilities, including:</p>
<ul>
<li><strong>Semantic Understanding:</strong> Moving beyond coordinates to understand environmental context.</li>
<li><strong>Natural Language Interaction:</strong> Powered by Large Language Models (LLMs) for intuitive task-based operation.</li>
</ul>
<p>By combining Vecow’s expertise in industrial edge computing with Kudan’s Perception and Navigation capabilities, the VTK AMR Dev Kit provides an integrated foundation for developers, system integrators, and research institutions. The platform supports a wide range of applications, from logistics automation and industrial inspection to smart infrastructure and next-generation AI robotics research.</p>
<p><img loading="lazy" class="aligncenter size-full wp-image-2183" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2026/03/画像4.png?resize=1395%2C600" alt="" width="1395" height="600" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2026/03/画像4.png?w=1395&amp;ssl=1 1395w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2026/03/画像4.png?resize=300%2C129&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2026/03/画像4.png?resize=1024%2C440&amp;ssl=1 1024w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2026/03/画像4.png?resize=768%2C330&amp;ssl=1 768w" sizes="(max-width: 1000px) 100vw, 1000px" data-recalc-dims="1" /></p>
<h2><strong>■Accelerating the Physical AI Ecosystem</strong></h2>
<p>The launch of the VTK AMR Dev Kit represents a pivotal step toward building the ecosystem required for Physical AI. By integrating edge computing, perception technologies, and advanced AI models, it empowers robots to truly understand and interact with their surroundings.</p>
<p>As the global demand for intelligent automation grows, integrated platforms like the VTK AMR Dev Kit will play a key role in accelerating robotics innovation and enabling the widespread adoption of Physical AI technologies worldwide.</p>
<h2><strong>■Exhibition Information: Live Demonstration Invitations</strong></h2>
<p>The VTK AMR Dev Kit announced in this release will be showcased at the following exhibitions. We invite you to experience next-generation autonomous mobility technology firsthand through live demonstrations and physical displays of the AMR development kit.</p>
<p><span style="text-decoration: underline;"><strong>NVIDIA GTC 2026</strong></span></p>
<ul>
<li><strong>Dates:</strong> Monday, March 16 – Thursday, March 19, 2026</li>
<li><strong>Venue:</strong> San Jose Convention Center (San Jose, CA, USA)</li>
<li><strong>Booth:</strong> #113</li>
</ul>
<p><span style="text-decoration: underline;"><strong>Japan IT Week Spring 2026</strong></span></p>
<ul>
<li><strong>Dates:</strong> Wednesday, April 8 – Friday, April 10, 2026</li>
<li><strong>Venue:</strong> Tokyo Big Sight (West Halls 3 &amp; 4)</li>
<li><strong>Booth:</strong> W23-43</li>
</ul>
<p>We look forward to seeing you at the events.</p>
<p><strong>About Kudan Inc.</strong><br />
Kudan provides spatial perception technology, described as the &#8220;eyes for all machines&#8221;, that drives the advancement of Physical AI and plays a pivotal role in next-generation digital twins and robotics. By perceiving the real world to create digital twins that AI can understand, Kudan contributes to the digital transformation (DX) of operational management and significant productivity gains. Furthermore, the company offers fundamental technologies that enable robots to digitally perceive their surroundings and achieve autonomous mobility in complex environments.<br />
For more information, please refer to Kudan’s website at <a href="https://www.kudan.io/" target="_blank" rel="noopener">https://www.kudan.io/</a>.</p>
<p>■Company Details<br />
Name: Kudan Inc.<br />
Securities Code: 4425 (TSE Growth)<br />
Representative: CEO Daiu Ko</p>
<p>■Contact Information<br />
For more details, please contact us from <a href="https://www.kudan.io/contact" target="_blank" rel="noopener">here</a></p><p>The post <a href="https://www.kudan.io/blog/vecow-launches-turn-key-amr-development-kit-integrating-kudans-spatial-perception-technology/">Vecow Launches Turn-Key AMR Development Kit Integrating Kudan’s Spatial Perception Technology to Advance Physical AI in Robotics</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></content:encoded>
					
		
		
		<post-id xmlns="com-wordpress:feed-additions:1">2178</post-id>	</item>
		<item>
		<title>Yours Technologies has decided to commercially launch its next-generation AMR, integrating Kudan SLAM with its multi-algorithm fusion system</title>
		<link>https://www.kudan.io/blog/yours-tech-launches-next-gen-amr-with-kudan-slam-integration/?utm_source=rss&#038;utm_medium=rss&#038;utm_campaign=yours-tech-launches-next-gen-amr-with-kudan-slam-integration</link>
		
		<dc:creator><![CDATA[user]]></dc:creator>
		<pubDate>Wed, 21 Aug 2024 01:00:23 +0000</pubDate>
				<category><![CDATA[Press Release]]></category>
		<category><![CDATA[Advanced mapping technology]]></category>
		<category><![CDATA[AI-driven human-robot interaction]]></category>
		<category><![CDATA[AI-powered delivery robots]]></category>
		<category><![CDATA[AMR]]></category>
		<category><![CDATA[Autonomous delivery systems]]></category>
		<category><![CDATA[Autonomous Mobile Robots]]></category>
		<category><![CDATA[Autonomous robot navigation]]></category>
		<category><![CDATA[Kudan SLAM]]></category>
		<category><![CDATA[Last-mile delivery robots]]></category>
		<category><![CDATA[Multi-sensor fusion]]></category>
		<category><![CDATA[Next-generation AMR]]></category>
		<category><![CDATA[Retail logistics automation]]></category>
		<category><![CDATA[Retail robotics]]></category>
		<category><![CDATA[Robot positioning systems]]></category>
		<category><![CDATA[Smart logistics solutions]]></category>
		<category><![CDATA[Yours Technologies]]></category>
		<guid isPermaLink="false">https://www.kudan.io/?p=1884</guid>

					<description><![CDATA[<p>Kudan Inc. (headquarters in Shibuya-ku, Tokyo; CEO Daiu Ko) is pleased to announce that Beijing Yours Technologies Co. Ltd. (headquarters in Beijing, China; CEO Peisen Lin, hereafter “Yours Technologies”), a leading company in last-one-mile delivery robot solution provider for next-generation retailing and logistics, has decided to commercially release a new generation of its AMR, where [&#8230;]</p>
<p>The post <a href="https://www.kudan.io/blog/yours-tech-launches-next-gen-amr-with-kudan-slam-integration/">Yours Technologies has decided to commercially launch its next-generation AMR, integrating Kudan SLAM with its multi-algorithm fusion system</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></description>
										<content:encoded><![CDATA[<p>Kudan Inc. (headquarters in Shibuya-ku, Tokyo; CEO Daiu Ko) is pleased to announce that Beijing Yours Technologies Co. Ltd. (headquarters in Beijing, China; CEO Peisen Lin, hereafter “Yours Technologies”), a leading company in last-one-mile delivery robot solution provider for next-generation retailing and logistics, has decided to commercially release a new generation of its AMR, where Kudan&#8217;s multi-sensor fusion-based SLAM software is integrated into Yours Technologies’ proprietary multi-algorithm system to provide accurate and robust positioning capability.</p>
<p><img loading="lazy" class="size-full wp-image-1886 aligncenter" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/08/PIC1.png?resize=967%2C263&#038;ssl=1" alt="" width="967" height="263" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/08/PIC1.png?w=967&amp;ssl=1 967w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/08/PIC1.png?resize=300%2C82&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/08/PIC1.png?resize=768%2C209&amp;ssl=1 768w" sizes="(max-width: 967px) 100vw, 967px" data-recalc-dims="1" /><br />
Established in 2018, Yours Technologies has emerged as a forefront provider of autonomous delivery robot solutions, securing investments from numerous global funds, including multiple commitments from Global Brain Corporation from Japan, through its KURONEKO Innovation Fund (KURONEKO Fund). Over the past five years, Yours Technologies has achieved significant milestones, deploying over 1,200 units of its Autonomous Mobile Robots (AMRs) across more than 110 cities. Initially focusing on servicing areas within a 3-kilometer radius of commercial facilities, Yours Technologies has recognized the increasing market demand for its solutions in diverse and challenging environments. This demand encompasses operations that require navigation through frequently changing sceneries and seamless transition between indoor and outdoor spaces.</p>
<p>To address these evolving operational needs, a more resilient Simultaneous Localization and Mapping (SLAM) processing fusion is needed. This necessity has paved the way for a crucial product development partnership between Yours Technologies and Kudan. The collaboration aims to enhance the capabilities of Yours Technologies&#8217; AMRs, enabling them to operate efficiently in complex scenarios, thereby meeting the growing market demands for versatile and reliable robotic delivery solutions.</p>
<p>Leveraging Kudan’s multi-sensor fusion SLAM algorithm, which combines the input data from affordable 3D-LiDAR and stereo cameras, the new Yours Technologies’ AMR model would function with high precision and reliability across a range of dynamic mapping scenarios. Remarkably, this performance is achieved using cost-effective computing platforms, ensuring superior performance and affordability. Additionally, Kudan SLAM’s advanced map creation and updating capabilities, essential for autonomous robot operation, further enable Yours Technologies’ AMR to be deployed and operated at reduced operational and maintenance expenses for the end-users. The new generation AMRs have been successfully deployed in some highly complex and demanding commercial facilities across China as trial activities for several months, and their precise and reliable operational performance is proven to be exceptional.</p>
<p>Beyond their technological partnership, the two companies will join forces in business development, sales and field services for market introduction beyond China, including deployment planned in the Japan market in the upcoming months. We are committed to offering best-in-class, economically viable AMR solutions and pioneering business models designed to open up new sources of revenue for retailers and logistics service providers.</p>
<p><strong>Comment from both companies:</strong></p>
<p>&#8220;We’re delighted to have Kudan partnering up with us and share our workload with their latest multi-sensor fusion they have rigorously tested over the years. At Yours Technologies, our mission is to deliver robots that navigate tough complex real-world environments, and to engage meaningfully with people through AI-Agent driven interactions. By merging Kudan’s expertise with our own, we strengthen the performance and adaptability of our robots in complex spaces while continuing to innovate in AI-driven human-robot interactions.”</p>
<p>― Peisen Lin, Chief Executive Officer at Yours Technologies</p>
<p>&#8220;We are thrilled to collaborate with Yours Technologies in bringing their next generation, innovative AMR solutions to market. By integrating Kudan&#8217;s multi-sensor fusion-based SLAM software, we provide the precision and robustness to navigate complex and dynamic environments,&#8221; said Daiu Ko, CEO of Kudan. “This partnership not only highlights the technological advancements we have achieved but also underscores our commitment to enhancing the efficiency and reliability of autonomous delivery solutions. We look forward to the positive impact of this collaboration on the retail and logistics sectors globally.&#8221;</p>
<p>― Daiu Ko, Chief Executive Officer at Kudan</p>
<p><strong>About Beijing Yours Technologies Co. Ltd.</strong><br />
Yours Technologies is using autonomous robots to help businesses in the physical world unlock new opportunities. Yours Robot now operates in over 110 locations with clients from the Fortune 500 companies. Powered by L4 self-driving technology and AI Agent framework, Yours Robot combines 3D LIDAR and vision fusion to navigate complex real word environments. For more information, please refer to Yours Technologies’ website at <a href="https://www.yours.xyz" target="_blank" rel="noopener">https://www.yours.xyz</a></p>
<p><strong>About Kudan Inc.</strong><br />
Kudan is a deep tech research and development company specializing in algorithms for artificial perception (AP). As a complement to artificial intelligence (AI), AP functions allow machines to develop autonomy. Currently, Kudan is licensing its technology for next-generation solution areas such as digital twin, robotics and autonomous driving.<br />
For more information, please refer to Kudan’s website at <a href="https://www.kudan.io/" target="_blank" rel="noopener">https://www.kudan.io/</a>.</p>
<p>■Company Details<br />
Name: Kudan Inc.<br />
Securities Code: 4425 (TSE Growth)<br />
Representative: CEO Daiu Ko</p>
<p>■For more details, please contact us from <a href="https://www.kudan.io/contact" target="_blank" rel="noopener">here</a>.</p><p>The post <a href="https://www.kudan.io/blog/yours-tech-launches-next-gen-amr-with-kudan-slam-integration/">Yours Technologies has decided to commercially launch its next-generation AMR, integrating Kudan SLAM with its multi-algorithm fusion system</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></content:encoded>
					
		
		
		<post-id xmlns="com-wordpress:feed-additions:1">1884</post-id>	</item>
		<item>
		<title>Vecow has decided to release the Robot Solution Kit with Kudan’s Artificial Perception technology</title>
		<link>https://www.kudan.io/blog/vecow-has-decided-to-release-the-robot-solution-kit-with-kudans-artificial-perception-technology/?utm_source=rss&#038;utm_medium=rss&#038;utm_campaign=vecow-has-decided-to-release-the-robot-solution-kit-with-kudans-artificial-perception-technology</link>
		
		<dc:creator><![CDATA[user]]></dc:creator>
		<pubDate>Tue, 02 Jul 2024 03:30:24 +0000</pubDate>
				<category><![CDATA[Press Release]]></category>
		<category><![CDATA[3D mapping]]></category>
		<category><![CDATA[AMR]]></category>
		<category><![CDATA[Artificial Perception]]></category>
		<category><![CDATA[Autonomous Mobile Robots]]></category>
		<category><![CDATA[autonomous vehicles]]></category>
		<category><![CDATA[digital twin]]></category>
		<category><![CDATA[Kudan]]></category>
		<category><![CDATA[Kudan SLAM]]></category>
		<category><![CDATA[Positioning]]></category>
		<category><![CDATA[SLAM]]></category>
		<category><![CDATA[Vecow]]></category>
		<guid isPermaLink="false">https://www.kudan.io/?p=1857</guid>

					<description><![CDATA[<p>Kudan is pleased to announce that Vecow Co., Ltd. (hereafter “Vecow”), a strategic partner of Kudan and a team of global embedded experts with its headquarter based in Taiwan, has decided to release the product of Vecow’s Turnkey SLAM Kit (hereafter “VTK SLAM Kit”), which embedded Kudan’s artificial perception technology. For the release from Vecow, [&#8230;]</p>
<p>The post <a href="https://www.kudan.io/blog/vecow-has-decided-to-release-the-robot-solution-kit-with-kudans-artificial-perception-technology/">Vecow has decided to release the Robot Solution Kit with Kudan’s Artificial Perception technology</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></description>
										<content:encoded><![CDATA[<p>Kudan is pleased to announce that Vecow Co., Ltd. (hereafter “Vecow”), a strategic partner of Kudan and a team of global embedded experts with its headquarter based in Taiwan, has decided to release the product of Vecow’s Turnkey SLAM Kit (hereafter “VTK SLAM Kit”), which embeds Kudan’s artificial perception technology. For the release from Vecow, please refer to <a href="https://www.vecow.com/dispPageBox/vecow/VecowCP.aspx?ddsPageID=NEWS_EN&amp;dbid=5111103016" target="_blank" rel="noopener">here</a>.</p>
<p><img loading="lazy" class="aligncenter wp-image-1859" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Pic1.png?resize=550%2C338&#038;ssl=1" alt="" width="550" height="338" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Pic1.png?w=852&amp;ssl=1 852w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Pic1.png?resize=300%2C185&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Pic1.png?resize=768%2C472&amp;ssl=1 768w" sizes="(max-width: 550px) 100vw, 550px" data-recalc-dims="1" /></p>
<p>Kudan has been actively engaging in joint product development and promotion with Vecow, starting with <a href="https://contents.xj-storage.jp/xcontents/AS02977/c8b41bf9/5a0a/40cc/8b83/6ba754fc9dc3/20201109001312537s.pdf" target="_blank" rel="noopener">the establishment of a strategic partnership</a> in 2020 and <a href="https://contents.xj-storage.jp/xcontents/AS02977/c9f95e82/4b4f/4867/9c43/e7bc57acbdf8/140120230804534983.pdf" target="_blank" rel="noopener">the launch of a development kit for digital twins</a> integrating both companies&#8217; core technologies in 2023.</p>
<p>This time, in addition to providing technology for digital twins, Vecow is launching the VTK SLAM Kit, a solution kit for autonomous mobile robots, which is experiencing rapid growth in demand.</p>
<p>The VTK SLAM Kit, optimized for various autonomous vehicles and Real-Time Location Systems (RTLS), is an industrial-grade software-hardware integrated kit. By integrating KudanSLAM’s industry-leading accuracy and stability in positioning and 3D mapping, the VTK SLAM Kit enables the implementation of diverse robotic solutions that were previously challenging to implement with conventional spatial perception technologies, thereby accelerating the societal implementation of robotics.</p>
<p><img loading="lazy" class="aligncenter wp-image-1860 size-full" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Pic2.png?resize=467%2C288&#038;ssl=1" alt="" width="467" height="288" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Pic2.png?w=467&amp;ssl=1 467w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Pic2.png?resize=300%2C185&amp;ssl=1 300w" sizes="(max-width: 467px) 100vw, 467px" data-recalc-dims="1" /></p>
<p>Solution Highlights of VTK SLAM Kit:</p>
<ul>
<li>3D-LiDAR based SLAM or Visual/camera based SLAM software is pre-installed for mapping and positioning functionalities.</li>
<li>Capable for multi-sensor fusion, including IMU, GNSS, Wheel Odometry, 2D-LiDAR, multiple cameras or multiple 3D-LiDARs</li>
<li>Quickly access to best-in-class localization and mapping technology, and help accelerate the development of products and solutions that need accurate and reliable position tracking in both indoor and outdoor environments.</li>
<li>Perpetual software license is granted. Users can use the solution kit without time limitations.</li>
</ul>
<p><img loading="lazy" class="size-full wp-image-1861 aligncenter" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Screenshot-2024-07-02-194807.png?resize=821%2C193&#038;ssl=1" alt="" width="821" height="193" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Screenshot-2024-07-02-194807.png?w=821&amp;ssl=1 821w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Screenshot-2024-07-02-194807.png?resize=300%2C71&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/07/Screenshot-2024-07-02-194807.png?resize=768%2C181&amp;ssl=1 768w" sizes="(max-width: 821px) 100vw, 821px" data-recalc-dims="1" /></p>
<p>Comment from both companies:<br />
Partnering with Vecow on the VTK SLAM Kit leverages our advanced SLAM technology to offer quick access to best-in-class mapping and localization solution for navigation in complex environments. This collaboration underscores our commitment to delivering cutting-edge SLAM software that meets the evolving needs of industries such as robotics and autonomous systems. By integrating our technology with Vecow’s sophisticated hardware, we are paving the way for enhanced innovation and efficiency in automation applications.</p>
<p>― Tian Hao, Chief Operating Officer at Kudan</p>
<p>Vecow and Kudan have a well-established history of collaboration in AMR applications. This expanded partnership takes advantage of Kudan’s specialized SLAM technologies. By integrating Vecow’s high-performance system with Kudan’s technologies, we have enhanced the VTK SLAM kit. This integration will enable our customers in the mobile robotics sector to achieve faster time-to-market. Additionally, we are committed to further increasing the adoption of the VTK solution as a key outcome of this partnership.</p>
<p>― Bryan Huang, Product Manager at Vecow</p>
<p>Please refer to link below for product details:<br />
<a href="https://www.vecow.com/dispPageBox/vecow/VecowCT.aspx?ddsPageID=PRODUCTDTL_EN&amp;dbid=5470100185" target="_blank" rel="noopener">https://www.vecow.com/dispPageBox/vecow/VecowCT.aspx?ddsPageID=PRODUCTDTL_EN&amp;dbid=5470100185</a></p>
<p><strong>About Vecow Co., Ltd.</strong><br />
Vecow is a team of global embedded experts and we aim to be your trusted embedded business partner. Vecow is committed to designing, developing, producing, and supplying high quality AIoT solutions with trusted reliability, advanced technology, and innovative concepts. Our products include: AI-ready Inference Systems, AI Computing Systems, Fanless Embedded Systems, Vehicle Computing Systems, Robust Computing Systems, Single Board Computers, Multi-Touch Computers/Displays, Frame Grabbers, Embedded Peripherals and Design &amp; Manufacturing Services for Machine Vision, Autonomous Vehicle, Smart Robotics, Digital Rail, Public Security, Transportation &amp; V2X, Smart Factory, Deep Learning, and any Edge AI applications.<br />
Vecow is a subsidiary of Ennoconn Corporation (6414. TW) in Taiwan, a global leader of industrial hardware systems and solutions provider to various vertical market applications. Ennoconn Corporation is a member of the Foxconn Group.</p>
<p>For more information, please refer to Vecow’s website at<br />
<a href="https://www.vecow.com/dispPageBox/vecow/VecowHp.aspx?ddsPageID=VECOW_EN" target="_blank" rel="noopener">https://www.vecow.com/dispPageBox/vecow/VecowHp.aspx?ddsPageID=VECOW_EN</a></p>
<p><strong>About Kudan Inc.</strong><br />
Kudan is a deep tech research and development company specializing in algorithms for artificial perception (AP). As a complement to artificial intelligence (AI), AP functions allow machines to develop autonomy. Currently, Kudan is licensing its technology for next-generation solution areas such as digital twin, robotics and autonomous driving.<br />
For more information, please refer to Kudan’s website at <a href="https://www.kudan.io/" target="_blank" rel="noopener">https://www.kudan.io/</a>.</p>
<p>■Company Details<br />
Name: Kudan Inc.<br />
Securities Code: 4425 (TSE Growth)<br />
Representative: CEO Daiu Ko</p>
<p>■For more details, please contact us from <a href="https://www.kudan.io/contact" target="_blank" rel="noopener">here</a>.</p><p>The post <a href="https://www.kudan.io/blog/vecow-launches-turn-key-amr-development-kit-integrating-kudans-spatial-perception-technology/">Vecow Launches Turn-Key AMR Development Kit Integrating Kudan’s Spatial Perception Technology to Advance Physical AI in Robotics</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></content:encoded>
					
		
		
		<post-id xmlns="com-wordpress:feed-additions:1">1857</post-id>	</item>
		<item>
		<title>Kudan’s Visual SLAM Technology Completes Integration with NVIDIA Isaac Perceptor</title>
		<link>https://www.kudan.io/blog/kudan-visual-slam-technology-completes-integration-with-nvidia-isaac-perceptor/?utm_source=rss&#038;utm_medium=rss&#038;utm_campaign=kudan-visual-slam-technology-completes-integration-with-nvidia-isaac-perceptor</link>
		
		<dc:creator><![CDATA[user]]></dc:creator>
		<pubDate>Mon, 03 Jun 2024 07:00:13 +0000</pubDate>
				<category><![CDATA[Press Release]]></category>
		<category><![CDATA[AMR]]></category>
		<category><![CDATA[Autonomous Mobile Robots]]></category>
		<category><![CDATA[Isaac ROS 2 software package]]></category>
		<category><![CDATA[KdVisual]]></category>
		<category><![CDATA[Kudan]]></category>
		<category><![CDATA[Kudan Visual SLAM]]></category>
		<category><![CDATA[NVIDIA]]></category>
		<category><![CDATA[NVIDIA Isaac Perceptor]]></category>
		<category><![CDATA[Simultaneous Localization and Mapping]]></category>
		<category><![CDATA[SLAM]]></category>
		<guid isPermaLink="false">https://www.kudan.io/?p=1820</guid>

					<description><![CDATA[<p>Kudan is pleased to announce that Kudan’s Visual SLAM (VSLAM) technology is integrated with NVIDIA Isaac Perceptor, an Isaac ROS 2 software package for developing autonomous mobile robots (AMRs). NVIDIA Isaac Perceptor is a collection of hardware-accelerated packages for visual AI, tailored for Autonomous Mobile Robot (AMR) to perceive, localize, and operate robustly in unstructured [&#8230;]</p>
<p>The post <a href="https://www.kudan.io/blog/kudan-visual-slam-technology-completes-integration-with-nvidia-isaac-perceptor/">Kudan’s Visual SLAM Technology Completes Integration with NVIDIA Isaac Perceptor</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></description>
										<content:encoded><![CDATA[<p>Kudan is pleased to announce that Kudan’s Visual SLAM (VSLAM) technology is integrated with NVIDIA Isaac Perceptor, an Isaac ROS 2 software package for developing autonomous mobile robots (AMRs).</p>
<p>NVIDIA Isaac Perceptor is a collection of hardware-accelerated packages for visual AI, tailored for Autonomous Mobile Robot (AMR) to perceive, localize, and operate robustly in unstructured environments.</p>
<p>With this collaboration, Kudan’s CUDA-accelerated VSLAM significantly enhances its performance by leveraging the 3D perception functions of NVIDIA Isaac Perceptor, including AI-based depth perception and real-time 3D reconstruction with a cost map. Using outputs from Kudan VSLAM, NVIDIA Isaac Perceptor can elevate localization performance in environments with dynamic objects and frequent scenery changes.</p>
<p>This has been successfully demonstrated using NVIDIA’s Nova Carter robot, confirming the value of real-time 3D occupancy maps in enhancing the VSLAM approach for mapping and localization of mobile robots. The combination of Kudan VSLAM with NVIDIA Isaac Perceptor offers a sophisticated, visual-based perception and navigation solution for AMR developers, helping enable reliable localization, obstacle detection, and navigation capabilities in complex scenarios.</p>
<p>To view the demonstration of Kudan Visual SLAM integrated with Isaac Perceptor, please visit:</p>
<p><strong>DEMO VIDEO</strong></p>
<p><iframe loading="lazy" title="YouTube video player" src="https://www.youtube.com/embed/g4Fei7FcaM8?si=Jv_iXUuF4nSTjrz_" width="560" height="315" frameborder="0" allowfullscreen="allowfullscreen"></iframe></p>
<p>Kudan will continue to deliver top-notch, best-in-class perception and navigation solutions, addressing the challenges faced by AMR developers and accelerating the deployment of AMRs in various environments.</p>
<p><strong>Comments from both companies:</strong></p>
<p>“We are thrilled to collaborate with NVIDIA in integrating our Visual SLAM technology with Isaac Perceptor, marking a significant leap forward in providing AMR developers with AI-based perception powered by NVIDIA AI and accelerated computing. We look forward to continuing our work with NVIDIA to push the boundaries of what’s possible in robotics.”</p>
<p><strong>&#8211; Daiu Ko, CEO of Kudan Inc.</strong></p>
<p>“AI-driven technologies are significantly advancing autonomous mobile robots. Kudan’s Visual SLAM technology integrated with Isaac Perceptor enables AMRs to navigate and operate in unstructured environments with greater precision and reliability.”</p>
<p><strong>&#8211; Deepu Talla, Vice President of Robotics and Edge Computing, NVIDIA</strong></p>
<p><strong>About Kudan Inc.</strong><br />
Kudan is a deep tech research and development company specializing in algorithms for artificial perception (AP). As a complement to artificial intelligence (AI), AP functions allow machines to develop autonomy. Currently, Kudan is licensing its technology for next-generation solution areas such as digital twin, robotics and autonomous driving.<br />
For more information, please refer to Kudan’s website at <a href="https://www.kudan.io/" target="_blank" rel="noopener">https://www.kudan.io/</a>.</p>
<p>■Company Details<br />
Name: Kudan Inc.<br />
Securities Code: 4425 (TSE Growth)<br />
Representative: CEO Daiu Ko</p>
<p>■For more details, please contact us from <a href="https://www.kudan.io/contact" target="_blank" rel="noopener">here</a>.</p><p>The post <a href="https://www.kudan.io/blog/kudan-visual-slam-technology-completes-integration-with-nvidia-isaac-perceptor/">Kudan’s Visual SLAM Technology Completes Integration with NVIDIA Isaac Perceptor</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></content:encoded>
					
		
		
		<post-id xmlns="com-wordpress:feed-additions:1">1820</post-id>	</item>
		<item>
		<title>Understanding Covariance Quality in Robot Localisation</title>
		<link>https://www.kudan.io/blog/understanding-covariance-quality-in-robot-localisation/?utm_source=rss&#038;utm_medium=rss&#038;utm_campaign=understanding-covariance-quality-in-robot-localisation</link>
		
		<dc:creator><![CDATA[user]]></dc:creator>
		<pubDate>Tue, 27 Feb 2024 02:20:09 +0000</pubDate>
				<category><![CDATA[Tech Blog]]></category>
		<category><![CDATA[Autonomous Driving]]></category>
		<category><![CDATA[autonomous mobile industrial robots]]></category>
		<category><![CDATA[Autonomous Mobile Robot]]></category>
		<category><![CDATA[Autonomous Mobile Robots]]></category>
		<category><![CDATA[autonomous mobility]]></category>
		<category><![CDATA[autonomous vehicles]]></category>
		<category><![CDATA[localization]]></category>
		<category><![CDATA[map-based localization]]></category>
		<category><![CDATA[Map-Based Localization for Autonomous Driving Workshop]]></category>
		<category><![CDATA[Simultaneous Localization and Mapping]]></category>
		<guid isPermaLink="false">https://www.kudan.io/?p=1765</guid>

					<description><![CDATA[<p>(Written by Anthony Glynn, Kudan CTO) Consider a robot navigating the bustling aisles of a warehouse, swiftly picking up and delivering items. It must decide how quickly to move and how much space to leave when turning corners to avoid accidents, like clipping a shelf and causing a cascade of items. For this, the robot [&#8230;]</p>
<p>The post <a href="https://www.kudan.io/blog/understanding-covariance-quality-in-robot-localisation/">Understanding Covariance Quality in Robot Localisation</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></description>
										<content:encoded><![CDATA[<p>(Written by <a href="https://www.linkedin.com/in/anthony-glynn-952b6653/">Anthony Glynn</a>, Kudan CTO)</p>
<p><img loading="lazy" class="size-large wp-image-1775 aligncenter" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/02/Screenshot-2024-02-27-at-10.16.03-1024x455.png?resize=1024%2C455&#038;ssl=1" alt="" width="1024" height="455" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/02/Screenshot-2024-02-27-at-10.16.03.png?resize=1024%2C455&amp;ssl=1 1024w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/02/Screenshot-2024-02-27-at-10.16.03.png?resize=300%2C133&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/02/Screenshot-2024-02-27-at-10.16.03.png?resize=768%2C342&amp;ssl=1 768w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/02/Screenshot-2024-02-27-at-10.16.03.png?resize=1536%2C683&amp;ssl=1 1536w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/02/Screenshot-2024-02-27-at-10.16.03.png?w=1808&amp;ssl=1 1808w" sizes="(max-width: 1000px) 100vw, 1000px" data-recalc-dims="1" /></p>
<p>Consider a robot navigating the bustling aisles of a warehouse, swiftly picking up and delivering items. It must decide how quickly to move and how much space to leave when turning corners to avoid accidents, like clipping a shelf and causing a cascade of items. For this, the robot relies on its localisation module which integrates data from its sensors, such as cameras, lidars and wheel odometry, and combines this with a prebuilt map of the environment to pinpoint its precise location. The localisation system must not only output its position but also assess how confident it is in its estimate. This confidence, quantified by something called covariance, is crucial. Accurate location data is essential, but so is the robot&#8217;s certainty about this data. If the robot misjudges its certainty, being either too confident or too cautious, it could lead to reckless behaviour or to an overly hesitant and inefficient system.</p>
<p><strong>Covariance</strong></p>
<p data-renderer-start-pos="949">Rather than relying on a single, precise location estimate, our localisation system instead outputs an entire probability distribution. Covariance, which comes from modelling our estimate as a Gaussian distribution, extends the concept of variance to multiple dimensions. It is represented as a matrix and captures both the notion of how spread out our estimates are, as well as the correlation between the different aspects of the robot’s pose such as the x and y coordinates. A larger covariance indicates a wider spread, signalling greater uncertainty: the robot’s true location could fall within a broader range of values.</p>
<p data-renderer-start-pos="949"><img loading="lazy" class="aligncenter wp-image-1766 size-full" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/02/download-2.png?resize=389%2C389&#038;ssl=1" alt="" width="389" height="389" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/02/download-2.png?w=389&amp;ssl=1 389w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/02/download-2.png?resize=300%2C300&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2024/02/download-2.png?resize=150%2C150&amp;ssl=1 150w" sizes="(max-width: 389px) 100vw, 389px" data-recalc-dims="1" /></p>
<p data-renderer-start-pos="949">(Image: two Gaussian distributions each represented by 500 samples and an ellipse depicting the 90% confidence region. The blue distribution has a much smaller covariance than the red distribution, indicating a more certain position estimate.)</p>
<p data-renderer-start-pos="1821">Effective decision making relies heavily on covariance. The system needs to determine if its confidence in its location estimate is sufficient to proceed with its current task, or if it must take corrective action and attempt to reduce its position uncertainty. Path planners can take pose covariance as input, and this allows them to adjust movement speed as well as path safety margins.</p>
<p data-renderer-start-pos="2211">Covariance also plays a vital role when integrating measurements from different sensors or combining pose estimates output from various internal modules, offering a systematic way to appropriately weight this information. Higher confidence data will be given more weight. This ensures that the most reliable information has the greatest influence on the system’s overall pose estimate.</p>
<p data-renderer-start-pos="2598">It is important that the covariance that is output accurately reflects the true level of uncertainty. An overconfident system could be dangerous, and a system that is too underconfident might be too inefficient.</p>
<p data-renderer-start-pos="2598"><strong>Overconfidence</strong></p>
<p data-renderer-start-pos="2598">The system is overconfident if it assumes its location and map are more accurate than they actually are. The output pose covariance will be smaller than it ought to be, meaning the system is underestimating the probability that its actual location could be further away from where it thinks it is.</p>
<p data-renderer-start-pos="3123">This can lead to underestimating new information. If it believes in its current pose estimate too strongly, it may undervalue new, especially conflicting, data. As a consequence it might resist adapting to new situations. This could even lead it to disregard corrective information, potentially preventing it from ultimately reducing its error.</p>
<p data-renderer-start-pos="3465">An overconfident system might cause the robot to exhibit risky behaviours such as travelling too quickly, or not leaving enough obstacle clearance. This could potentially result in dangerous situations, such as collisions or the robot getting stuck.</p>
<h4 id="Underconfidence" data-renderer-start-pos="3711"><strong>Underconfidence</strong></h4>
<p data-renderer-start-pos="3728">Conversely, an underconfident system will be excessively cautious regarding the quality of its pose estimate, resulting in an excessively large covariance. This means it is exaggerating the likelihood that its true location is significantly different from its estimated position.</p>
<p data-renderer-start-pos="4009">This would likely result in reduced efficiency, or increased running times as a result of overly cautious behaviours<strong data-renderer-mark="true">. </strong>For example the robot might move at a ridiculously slow pace, or it might repeatedly decide it requires additional data and processing time to confirm already known information.</p>
<h4 id="Understanding-covariance-quality" data-renderer-start-pos="4317"><strong>Understanding covariance quality</strong></h4>
<p data-renderer-start-pos="4351">It is therefore imperative that we are able to analyse and understand the quality of the covariance estimates that the system, or any of its internal modules, produces. A good covariance should accurately model the probability: the “true” pose should be contained inside the estimated covariance’s 90% confidence region 90% of the time. It is realistic to expect some degree of degradation in the covariance quality because the system is nonlinear. This means the true probability distribution, in general, can’t be perfectly modelled as a Gaussian distribution, so the Gaussian representation will necessarily be an approximation.</p>
<p data-renderer-start-pos="4984">To perform this analysis we look at the system’s performance over a large variety of datasets, and compare it to ground-truth. Internally at Kudan we are continuing to explore better ways of measuring and visualising covariance quality, as well as trying to understand which variables have the most significant impact on covariance quality.</p>
<p data-renderer-start-pos="5326">Once a system’s covariance quality is understood, the next step is to use this information to calibrate the uncertainty estimation: adjusting the estimated covariance in order to better represent the true uncertainty.</p>
<p data-renderer-start-pos="5326"><strong>Closing thoughts</strong></p>
<p>The management of uncertainty through covariance is fundamental to the operational success of mobile robots, ensuring both safety and efficiency in dynamic environments such as warehouses. By refining our understanding and calibration of covariance estimates, we continue pushing closer to finding the right balance between avoiding the pitfalls of dangerous overconfidence, and the inefficiencies of undue caution.</p>
<p>&nbsp;</p>
<p><a href="https://www.kudan.io/contact/"><strong>Please contact us for learning further technical information</strong></a></p>
<p>&nbsp;</p><p>The post <a href="https://www.kudan.io/blog/understanding-covariance-quality-in-robot-localisation/">Understanding Covariance Quality in Robot Localisation</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></content:encoded>
					
		
		
		<post-id xmlns="com-wordpress:feed-additions:1">1765</post-id>	</item>
		<item>
		<title>Kudan Announces Release of KdVisual SLAM Software 2 on Intel Edge Insight for AMR</title>
		<link>https://www.kudan.io/blog/release-of-kdvisual-slam-software-2-on-intel-edge-insight-for-amr/?utm_source=rss&#038;utm_medium=rss&#038;utm_campaign=release-of-kdvisual-slam-software-2-on-intel-edge-insight-for-amr</link>
		
		<dc:creator><![CDATA[user]]></dc:creator>
		<pubDate>Mon, 13 Nov 2023 07:00:00 +0000</pubDate>
				<category><![CDATA[Press Release]]></category>
		<category><![CDATA[AMR]]></category>
		<category><![CDATA[Autonomous Mobile Robots]]></category>
		<category><![CDATA[Edge Insight]]></category>
		<category><![CDATA[Intel]]></category>
		<category><![CDATA[KdVisual]]></category>
		<category><![CDATA[KdVisual SLAM Software 2]]></category>
		<category><![CDATA[Kudan]]></category>
		<category><![CDATA[Kudan Visual SLAM]]></category>
		<category><![CDATA[SLAM]]></category>
		<guid isPermaLink="false">https://www.kudan.io/?p=1697</guid>

					<description><![CDATA[<p>Showcasing 4x reduction in memory usage, and 2x improvement in accuracy, KdVisual 2, is fully optimized for Intel&#8217;s Edge Insight for Autonomous Mobile Robots (AMR). With hardware acceleration and optimization provided by Intel for 11th generation and subsequent Intel Core processors, Kudan solidifies its commitment to delivering superior performance in commercial robotics. An all-encompassing video [&#8230;]</p>
<p>The post <a href="https://www.kudan.io/blog/release-of-kdvisual-slam-software-2-on-intel-edge-insight-for-amr/">Kudan Announces Release of KdVisual SLAM Software 2 on Intel Edge Insight for AMR</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></description>
										<content:encoded><![CDATA[<p><strong><em>Showcasing 4x reduction in memory usage, and 2x improvement in accuracy, KdVisual 2, is fully optimized for Intel&#8217;s Edge Insight for Autonomous Mobile Robots (AMR). With hardware acceleration and optimization provided by Intel for 11th generation and subsequent Intel Core processors, Kudan solidifies its commitment to delivering superior performance in commercial robotics. An all-encompassing video tutorial series further aids users in seamlessly integrating the SLAM software into their AMR products.</em></strong></p>
<p>Kudan Inc. (headquarters in Shibuya-ku, Tokyo; CEO Daiu Ko, hereafter “Kudan”), a leading provider of real-time simultaneous localization and mapping (SLAM) software, is elated to introduce its latest KdVisual SLAM Software 2 for Intel EI for AMR. This major upgrade accentuates the integration of best-in-class SLAM technology with Intel&#8217;s Edge Insight for AMR, continuing its transformation in commercial-ready robotics applications.</p>
<p><img loading="lazy" class="aligncenter wp-image-1699 size-large" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/11/pic_intel-1024x415.png?resize=1024%2C415&#038;ssl=1" alt="" width="1024" height="415" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/11/pic_intel.png?resize=1024%2C415&amp;ssl=1 1024w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/11/pic_intel.png?resize=300%2C122&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/11/pic_intel.png?resize=768%2C311&amp;ssl=1 768w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/11/pic_intel.png?w=1125&amp;ssl=1 1125w" sizes="(max-width: 1000px) 100vw, 1000px" data-recalc-dims="1" /></p>
<p>As the robotic landscape reverberates with a growing demand for precise, real-time 3D mapping, the collaboration between Kudan and Intel materializes a holistic hardware-software solution to cater to these industry needs. KdVisual 2 not only offers enhanced performance and robustness but also takes pride in the hardware acceleration and optimization provided by Intel, on 11th generation and beyond Intel Core processors.</p>
<p>Key features and improvements include:</p>
<ul>
<li><strong>Enhanced Memory Usage and Accuracy</strong>: KdVisual 2 represents a major architectural overhaul which further reduces memory usage by 4x and improves accuracy by 2x compared to previous KdVisual 1, ensuring real-time mapping and localization with support for a wider range of computing requirements.</li>
<li><strong>Robust Performance</strong>: Through the implementation of refined architecture for the SLAM pipeline, KdVisual 2 ensures repeatable, and deterministic results for reliable mapping and localization.</li>
<li><strong>Seamless Integration</strong>: Perfectly attuned to Intel&#8217;s Edge Insight for AMR, KdVisual 2 promises effortless deployment across platforms.</li>
<li><strong>Intel-Powered Hardware Acceleration</strong>: Boasting hardware optimization provided by Intel for its 11th generation and subsequent Core processors, KdVisual 2 affirms top-tier performance metrics in robotic applications.</li>
<li><strong>Commercial Robotics Centric</strong>: From dynamic logistics centers to innovative manufacturing sites, KdVisual 2 emerges as the best-in-class visual SLAM component.</li>
<li><strong>Comprehensive Tutorial Series</strong>: A meticulously crafted tutorial video series walks users through every phase: from download, installation, system setup, to the real-time deployment on a development robot.</li>
</ul>
<p>Reflecting on our continued partnership with Intel, Daiu Ko, CEO of Kudan Inc., expressed, &#8220;Our sustained partnership with Intel echoes our shared vision of pioneering next-gen robotics. The infusion of our latest KdVisual SLAM software, underpinned by Intel&#8217;s hardware optimization, reinforces our dedication to steering the AMR domain towards unparalleled heights.&#8221;</p>
<p>Echoing the sentiment, Intel&#8217;s latest rollout of the EI for AMR software package underscores its relentless effort to redefine the future of robotics. By leveraging Intel&#8217;s hardware and software expertise with Kudan&#8217;s cutting-edge software solutions, we are paving the way for unprecedented breakthroughs in robotics.</p>
<p>For an immersive exploration of KdVisual 2 SLAM Software and its synergy with Intel&#8217;s Edge Insight for AMR, navigate to the <a href="https://www.kudan.io/blog/kdvisual-slam-software-2-for-intel-ei-for-amr/" target="_blank" rel="noopener">blog article</a> and <a href="https://www.intel.com/content/www/us/en/partner/showcase/kudan/overview.html" target="_blank" rel="noopener">Intel EI for AMR website featuring Kudan SLAM</a> and the <a href="https://www.intel.com/content/www/us/en/docs/ei-for-amr/developer-guide/2022-3-1/kudan-visual-slam.html" target="_blank" rel="noopener">Developer’s Guide</a>.</p>
<p><strong>About Kudan Inc.</strong><br />
Kudan is a deep tech research and development company specializing in algorithms for artificial perception (AP). As a complement to artificial intelligence (AI), AP functions allow machines to develop autonomy. Currently, Kudan is using its high-level technical innovation to explore business areas based on its own milestone models established for deep tech which provide wide-ranging impact on several major industrial fields.<br />
For more information, please refer to Kudan’s website at <a href="https://www.kudan.io/" target="_blank" rel="noopener">https://www.kudan.io/</a>.</p>
<p>■Company Details<br />
Name: Kudan Inc.<br />
Securities Code: 4425 (TSE Growth)<br />
Representative: CEO Daiu Ko</p>
<p>■For more details, please contact us from <a href="https://www.kudan.io/contact" target="_blank" rel="noopener">here</a>.</p><p>The post <a href="https://www.kudan.io/blog/release-of-kdvisual-slam-software-2-on-intel-edge-insight-for-amr/">Kudan Announces Release of KdVisual SLAM Software 2 on Intel Edge Insight for AMR</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></content:encoded>
					
		
		
		<post-id xmlns="com-wordpress:feed-additions:1">1697</post-id>	</item>
		<item>
		<title>Introducing Kudan&#8217;s Revamped KdVisual SLAM Software 2 for Intel EI for AMR: A New Era for Robotic Applications</title>
		<link>https://www.kudan.io/blog/kdvisual-slam-software-2-for-intel-ei-for-amr/?utm_source=rss&#038;utm_medium=rss&#038;utm_campaign=kdvisual-slam-software-2-for-intel-ei-for-amr</link>
		
		<dc:creator><![CDATA[user]]></dc:creator>
		<pubDate>Mon, 13 Nov 2023 06:00:34 +0000</pubDate>
				<category><![CDATA[Tech Blog]]></category>
		<category><![CDATA[AMR]]></category>
		<category><![CDATA[Autonomous Mobile Robots]]></category>
		<category><![CDATA[Intel]]></category>
		<category><![CDATA[Intel's Edge Insight]]></category>
		<category><![CDATA[KdVisual]]></category>
		<category><![CDATA[KdVisual SLAM Software 2]]></category>
		<category><![CDATA[Kudan]]></category>
		<category><![CDATA[Kudan Visual SLAM]]></category>
		<guid isPermaLink="false">https://www.kudan.io/?p=1690</guid>

					<description><![CDATA[<p>The ever-evolving landscape of robotics and artificial intelligence (AI) continuously beckons for advancements that can cater to growing industry demands. Addressing this call, we at Kudan are thrilled to unveil our latest innovation, the KdVisual SLAM Software 2, optimized for Intel&#8217;s Edge Insight for Autonomous Mobile Robots (AMR). A Partnership Carved in Technological Excellence Kudan&#8217;s [&#8230;]</p>
<p>The post <a href="https://www.kudan.io/blog/kdvisual-slam-software-2-for-intel-ei-for-amr/">Introducing Kudan’s Revamped KdVisual SLAM Software 2 for Intel EI for AMR: A New Era for Robotic Applications</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></description>
										<content:encoded><![CDATA[<p>The ever-evolving landscape of robotics and artificial intelligence (AI) continuously beckons for advancements that can cater to growing industry demands. Addressing this call, we at Kudan are thrilled to unveil our latest innovation, the KdVisual SLAM Software 2, optimized for Intel&#8217;s Edge Insight for Autonomous Mobile Robots (AMR).</p>
<h2><strong>A Partnership Carved in Technological Excellence</strong></h2>
<p>Kudan&#8217;s alliance with Intel has always been underscored by a shared vision of propelling the world of robotics into the future. Our combined efforts have led to a version of KdVisual SLAM software that not only promises swifter performance and increased robustness but also basks in the hardware acceleration and optimization, generously provided by Intel, for the 11th generation and subsequent Intel Core processors. Check out the partner show case of Kudan from Intel:<br />
<a href="https://www.intel.com/content/www/us/en/partner/showcase/kudan/overview.html" target="_blank" rel="noopener">https://www.intel.com/content/www/us/en/partner/showcase/kudan/overview.html</a></p>
<p><img loading="lazy" class="aligncenter wp-image-1691 size-large" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/11/blog-pic_intel-1024x289.png?resize=1024%2C289&#038;ssl=1" alt="" width="1024" height="289" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/11/blog-pic_intel.png?resize=1024%2C289&amp;ssl=1 1024w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/11/blog-pic_intel.png?resize=300%2C85&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/11/blog-pic_intel.png?resize=768%2C217&amp;ssl=1 768w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/11/blog-pic_intel.png?w=1031&amp;ssl=1 1031w" sizes="(max-width: 1000px) 100vw, 1000px" data-recalc-dims="1" /></p>
<h2><strong>What’s New in KdVisual SLAM Software 2 for Intel EI for AMR?</strong></h2>
<ul>
<li><strong>New Architecture</strong>: KdVisual 2 represents the culmination of learnings from the original KdVisual, and optimized within a new architecture. KdVisual 2 brings refined operations for SLAM for reliable and repeatable deterministic behavior.</li>
<li><strong>Unparalleled Memory Usage</strong>: KdVisual 2’s advanced algorithm reduces memory usage by 4x over KdVisual 1, which was already a leader in memory and processor usage.</li>
<li><strong>Unwavering Precision</strong>: Precision-engineered to tackle even the most complex environments, KdVisual 2 brings 2x improvement in accuracy, continuing to be the dependable choice for mapping and localization.</li>
<li><strong>Integration like Never Before</strong>: The software seamlessly integrates with Intel&#8217;s Edge Insight for AMR, guaranteeing effortless deployment across various platforms.</li>
<li><strong>Hardware Excellence by Intel</strong>: The software is supercharged with hardware optimization provided by Intel, promising unparalleled performance in all robotic endeavors.</li>
</ul>
<h2><strong>Dive Deep with Our Tutorial Series</strong></h2>
<p>We understand that new advancements always come with new learnings. To ensure a smooth transition to this major upgrade for our user community, we&#8217;ve launched an all-encompassing tutorial video series. These tutorials have been meticulously crafted to guide users through every phase of the software application, from the initial download and installation to setting up the system and real-time deployment on a development robot.</p>
<p>Access the Tutorial Videos Here:</p>
<ul>
<li><a href="https://www.youtube.com/watch?v=hpKcIW_eAXQ" target="_blank" rel="noopener">Introducing KdVisual on Intel EI for AMR</a></li>
<li><a href="https://www.youtube.com/watch?v=lFKToU7F_3Q" target="_blank" rel="noopener">Installation &amp; Setup of KdVisual on Intel EI for AMR</a></li>
<li><a href="https://www.youtube.com/watch?v=JPbxeSneSeM" target="_blank" rel="noopener">Deploying Intel EI for AMR with KdVisual on a Development Robot</a></li>
</ul>
<h2><strong>A New Dawn in Robotics</strong></h2>
<p>At Kudan, we believe that technology should evolve in tandem with the needs of its users. This release, a culmination of persistent endeavors and unwavering vision, marks the beginning of a new chapter in commercial robotics applications. As we march forward, we invite our community of developers, innovators, and tech enthusiasts to join us on this exciting journey into the future.</p>
<p>■For more details, please contact us from <a href="https://www.kudan.io/contact" target="_blank" rel="noopener noreferrer">here</a>.</p><p>The post <a href="https://www.kudan.io/blog/kdvisual-slam-software-2-for-intel-ei-for-amr/">Introducing Kudan’s Revamped KdVisual SLAM Software 2 for Intel EI for AMR: A New Era for Robotic Applications</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></content:encoded>
					
		
		
		<post-id xmlns="com-wordpress:feed-additions:1">1690</post-id>	</item>
		<item>
		<title>Kudan’s strategic partner Movel AI starts commercial offering for integrated solution for AMR with Kudan SLAM</title>
		<link>https://www.kudan.io/blog/kudan-movel-partnership-commercial-milestone-2023/?utm_source=rss&#038;utm_medium=rss&#038;utm_campaign=kudan-movel-partnership-commercial-milestone-2023</link>
		
		<dc:creator><![CDATA[user]]></dc:creator>
		<pubDate>Thu, 16 Mar 2023 08:00:47 +0000</pubDate>
				<category><![CDATA[Press Release]]></category>
		<category><![CDATA[all-in-one solution]]></category>
		<category><![CDATA[AMR]]></category>
		<category><![CDATA[Autonomous Mobile Robots]]></category>
		<category><![CDATA[commercial offering]]></category>
		<category><![CDATA[Fleet Management System (FMS)]]></category>
		<category><![CDATA[fleet management]]></category>
		<category><![CDATA[KdLidar]]></category>
		<category><![CDATA[KdVisual]]></category>
		<category><![CDATA[Kudan]]></category>
		<category><![CDATA[Kudan 3D-Lidar SLAM]]></category>
		<category><![CDATA[Kudan SLAM]]></category>
		<category><![CDATA[Kudan Visual SLAM]]></category>
		<category><![CDATA[Lidar SLAM]]></category>
		<category><![CDATA[localization]]></category>
		<category><![CDATA[Movel AI]]></category>
		<category><![CDATA[Navigation]]></category>
		<category><![CDATA[Robotic Navigation System (RNS)]]></category>
		<category><![CDATA[Seirios]]></category>
		<category><![CDATA[Simultaneous Localization and Mapping]]></category>
		<category><![CDATA[SLAM]]></category>
		<category><![CDATA[Visual SLAM]]></category>
		<guid isPermaLink="false">https://www.kudan.io/?p=1599</guid>

					<description><![CDATA[<p>Kudan Inc. (headquarters in Shibuya-ku, Tokyo; CEO Daiu Ko, hereafter “Kudan”) is happy to announce that our strategic partner, Movel AI, has wholly integrated Kudan’s 3D Lidar ‘KdLidar’ and visual ‘KdVisual’ Simultaneous Localization and Mapping (SLAM) software into its all-in-one commercial software solution ‘Seirios’ for autonomous mobile robots (AMR). This Kudan SLAM integrated solution is [&#8230;]</p>
<p>The post <a href="https://www.kudan.io/blog/kudan-movel-partnership-commercial-milestone-2023/">Kudan’s strategic partner Movel AI starts commercial offering for integrated solution for AMR with Kudan SLAM</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></description>
										<content:encoded><![CDATA[<p>Kudan Inc. (headquarters in Shibuya-ku, Tokyo; CEO Daiu Ko, hereafter “Kudan”) is happy to announce that our strategic partner, <a href="https://movel.ai/" target="_blank" rel="noopener">Movel AI</a>, has wholly integrated Kudan’s 3D Lidar ‘KdLidar’ and visual ‘KdVisual’ Simultaneous Localization and Mapping (SLAM) software into its all-in-one commercial software solution ‘Seirios’ for autonomous mobile robots (AMR). This Kudan SLAM integrated solution is now available for the global market and is expected to have multiple commercial deployments on customer sites in the coming months.</p>
<p><img loading="lazy" class="aligncenter wp-image-1601 size-full" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic1_Movel-AI.png?resize=939%2C527&#038;ssl=1" alt="" width="939" height="527" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic1_Movel-AI.png?w=939&amp;ssl=1 939w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic1_Movel-AI.png?resize=300%2C168&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic1_Movel-AI.png?resize=768%2C431&amp;ssl=1 768w" sizes="(max-width: 939px) 100vw, 939px" data-recalc-dims="1" /></p>
<p>Movel AI, the Singapore-based robotics software company, is widely offering its clients this all-in-one localization, navigation and fleet management software through a seamless user interface which will benefit from Kudan’s robust and highly accurate positioning software.</p>
<p>Kudan and Movel AI began talks in 2022, and the aligned synergy between both companies led us to start collaborating on product development and business expansion towards the AMR market.</p>
<p>This partnership enabled Movel AI to integrate Kudan’s 3D Lidar SLAM and Visual SLAM into its Seirios <a href="https://movel.ai/seirios-rns" target="_blank" rel="noopener">Robotic Navigation System (RNS)</a> and <a href="https://movel.ai/seirios-fms" target="_blank" rel="noopener">Fleet Management System (FMS)</a> to offer greater localization and mapping capabilities to its clients. Both new and existing clients can experience enhanced localization accuracy by up to 2cm, so that robots can more precisely position themselves and navigate in the environment. Kudan SLAM also allows the robots to operate in dynamic and complex environments with excellent robustness, reliability and operational efficiency compared to other open-source solutions.</p>
<div id="attachment_1602" style="width: 949px" class="wp-caption aligncenter"><img aria-describedby="caption-attachment-1602" loading="lazy" class="wp-image-1602 size-full" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic2_Movel-AI.png?resize=939%2C550&#038;ssl=1" alt="" width="939" height="550" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic2_Movel-AI.png?w=939&amp;ssl=1 939w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic2_Movel-AI.png?resize=300%2C176&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic2_Movel-AI.png?resize=768%2C450&amp;ssl=1 768w" sizes="(max-width: 939px) 100vw, 939px" data-recalc-dims="1" /><p id="caption-attachment-1602" class="wp-caption-text">Figure 1: Seirios’ Lane Navigation Feature (Image Source: Movel AI)</p></div>
<p>&nbsp;</p>
<div id="attachment_1603" style="width: 949px" class="wp-caption aligncenter"><img aria-describedby="caption-attachment-1603" loading="lazy" class="wp-image-1603 size-full" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic3_Movel-AI.png?resize=939%2C658&#038;ssl=1" alt="" width="939" height="658" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic3_Movel-AI.png?w=939&amp;ssl=1 939w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic3_Movel-AI.png?resize=300%2C210&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic3_Movel-AI.png?resize=768%2C538&amp;ssl=1 768w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic3_Movel-AI.png?resize=600%2C419&amp;ssl=1 600w" sizes="(max-width: 939px) 100vw, 939px" data-recalc-dims="1" /><p id="caption-attachment-1603" class="wp-caption-text">Figure 2: Seirios FMS Manage Map Feature (Image source: Movel AI)</p></div>
<p>For Kudan, this collaboration and release of all-in-one solution allows us to offer customers navigation and fleet management solutions on top of our SLAM software. This will benefit customers who are looking to accelerate their go-to-market plan or to scale their robotic fleet aggressively.</p>
<div id="attachment_1604" style="width: 949px" class="wp-caption aligncenter"><img aria-describedby="caption-attachment-1604" loading="lazy" class="wp-image-1604 size-full" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic4_Movel-AI.png?resize=939%2C458&#038;ssl=1" alt="" width="939" height="458" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic4_Movel-AI.png?w=939&amp;ssl=1 939w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic4_Movel-AI.png?resize=300%2C146&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic4_Movel-AI.png?resize=768%2C375&amp;ssl=1 768w" sizes="(max-width: 939px) 100vw, 939px" data-recalc-dims="1" /><p id="caption-attachment-1604" class="wp-caption-text">Figure 3: Kudan’s 3D-Lidar SLAM (Reference video : <a href="https://youtu.be/JVXS6q2KoGE" target="_blank" rel="noopener">https://youtu.be/JVXS6q2KoGE</a>)</p></div>
<div id="attachment_1605" style="width: 949px" class="wp-caption aligncenter"><img aria-describedby="caption-attachment-1605" loading="lazy" class="wp-image-1605 size-full" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic5_Movel-AI.png?resize=939%2C427&#038;ssl=1" alt="" width="939" height="427" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic5_Movel-AI.png?w=939&amp;ssl=1 939w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic5_Movel-AI.png?resize=300%2C136&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Pic5_Movel-AI.png?resize=768%2C349&amp;ssl=1 768w" sizes="(max-width: 939px) 100vw, 939px" data-recalc-dims="1" /><p id="caption-attachment-1605" class="wp-caption-text">Figure 4: Kudan’s Visual SLAM (Reference video : <a href="https://youtu.be/wm4M_jW4zyc" target="_blank" rel="noopener">https://youtu.be/wm4M_jW4zyc</a>)</p></div>
<p>&#8220;We are thrilled to partner with Movel AI and witness the commercial release of this integrated solution leveraging our market-leading SLAM technologies.&#8221; said Daiu Ko, CEO of Kudan. &#8220;With our collaboration, we look forward to leveraging the business footprint of both companies in different regions and helping AMR vendors in the global market grow their deployment base with enhanced performance and operational efficiency enabled by our joint solution.”</p>
<p>&#8220;At Movel AI, we aim to speed up robot adoption across industries. Our partnership with Kudan is a crucial step towards achieving this vision, while simultaneously advancing robotics companies to leverage enhanced navigation experiences.” said Abhishek Gupta, CEO of Movel AI. &#8220;Through collaborating with Kudan, we are confident in delivering a greater value by leveraging our expertise to offer innovative navigation solutions to the robotics market. This partnership is a significant milestone for both companies, and we look forward to the opportunities it presents.”</p>
<p>The joint solution has already generated the interest of multiple robotics companies, which are currently testing it in different phases and progressing towards commercial site deployment. Seirios’ technology will enable everyone, even non-tech users, to enjoy the performance capabilities and experience seamless robot navigation. Furthermore, it can also be scaled efficiently across robotic fleets, allowing robot fleet managers to run and delegate tasks to up to 100 robots with one single system.</p>
<p>To find out more about this all-in-one solution or to try a 15-day trial, please reach out to us via <a href="https://www.kudan.io/contact" target="_blank" rel="noopener">Kudan</a> or <a href="https://movel.ai/" target="_blank" rel="noopener">Movel AI</a>.</p>
<p><span style="text-decoration: underline;"><strong>About Movel AI</strong></span><br />
Movel AI is a robotics software, deep tech startup based in Singapore. Movel AI delivers human-like precision and movements to robots; combining sensor fusion, vision and machine learning &amp; artificial intelligence technologies. Movel AI’s solutions are tailored to customers across different industries, applications and needs; from AGVs (automated guided vehicles) traversing factory grounds for logistical movements, to inspection robots scanning structural defects within multi-storey buildings, to enabling automated navigation in personal mobility devices. For more information, please refer to Movel AI’s website at <a href="http://www.movel.ai" target="_blank" rel="noopener">www.movel.ai</a>.</p>
<p><span style="text-decoration: underline;"><strong>About Kudan Inc.</strong></span><br />
Kudan is a deep tech research and development company specializing in algorithms for artificial perception (AP). As a complement to artificial intelligence (AI), AP functions allow machines to develop autonomy. Currently, Kudan is using its high-level technical innovation to explore business areas based on its own milestone models established for deep tech which provide wide-ranging impact on several major industrial fields.<br />
For more information, please refer to Kudan’s website at <a href="https://www.kudan.io/" target="_blank" rel="noopener noreferrer">https://www.kudan.io/</a>.</p>
<p>■Company Details<br />
Name: Kudan Inc.<br />
Securities Code: 4425 (TSE Growth)<br />
Representative: CEO Daiu Ko</p>
<p>■For more details, please contact us from <a href="https://www.kudan.io/contact" target="_blank" rel="noopener noreferrer">here</a>.</p><p>The post <a href="https://www.kudan.io/blog/kudan-movel-partnership-commercial-milestone-2023/">Kudan’s strategic partner Movel AI starts commercial offering for integrated solution for AMR with Kudan SLAM</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></content:encoded>
					
		
		
		<post-id xmlns="com-wordpress:feed-additions:1">1599</post-id>	</item>
		<item>
		<title>ProMat 2023: Kudan to exhibit and present its visual and 3D Lidar SLAM solutions for intralogistics robotics use cases</title>
		<link>https://www.kudan.io/blog/promat-exhibit-2023/?utm_source=rss&#038;utm_medium=rss&#038;utm_campaign=promat-exhibit-2023</link>
		
		<dc:creator><![CDATA[user]]></dc:creator>
		<pubDate>Tue, 14 Mar 2023 06:00:01 +0000</pubDate>
				<category><![CDATA[Press Release]]></category>
		<category><![CDATA[3D-Lidar SLAM]]></category>
		<category><![CDATA[AMR]]></category>
		<category><![CDATA[Autonomous Mobile Robots]]></category>
		<category><![CDATA[Edge Insight]]></category>
		<category><![CDATA[event]]></category>
		<category><![CDATA[Intel]]></category>
		<category><![CDATA[intralogistics]]></category>
		<category><![CDATA[Kudan]]></category>
		<category><![CDATA[Kudan’s 3D Lidar SLAM]]></category>
		<category><![CDATA[logistics]]></category>
		<category><![CDATA[material handling]]></category>
		<category><![CDATA[ProMat 2023]]></category>
		<category><![CDATA[SDK]]></category>
		<category><![CDATA[Simultaneous Localization and Mapping]]></category>
		<category><![CDATA[SLAM]]></category>
		<category><![CDATA[supply chain show]]></category>
		<category><![CDATA[Visual SLAM]]></category>
		<guid isPermaLink="false">https://www.kudan.io/?p=1594</guid>

					<description><![CDATA[<p>Kudan Inc. (headquartered in Shibuya-ku, Tokyo; CEO Daiu Ko, hereafter “Kudan”) will be exhibiting at the upcoming ProMat conference to showcase its solution for applications within the manufacturing and supply chain industry. This event will be taking place on 20-23 March at McCormick Place, Chicago, USA. ProMat is the largest international material handling, logistics and [&#8230;]</p>
<p>The post <a href="https://www.kudan.io/blog/promat-exhibit-2023/">ProMat 2023: Kudan to exhibit and present its visual and 3D Lidar SLAM solutions for intralogistics robotics use cases</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></description>
										<content:encoded><![CDATA[<p><img loading="lazy" class="aligncenter wp-image-1595 size-large" src="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Promat_1-1024x512.png?resize=1024%2C512&#038;ssl=1" alt="" width="1024" height="512" srcset="https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Promat_1.png?resize=1024%2C512&amp;ssl=1 1024w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Promat_1.png?resize=300%2C150&amp;ssl=1 300w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Promat_1.png?resize=768%2C384&amp;ssl=1 768w, https://i0.wp.com/www.kudan.io/wp-content/uploads/2023/03/Promat_1.png?w=1125&amp;ssl=1 1125w" sizes="(max-width: 1000px) 100vw, 1000px" data-recalc-dims="1" /></p>
<p>Kudan Inc. (headquartered in Shibuya-ku, Tokyo; CEO Daiu Ko, hereafter “Kudan”) will be exhibiting at the upcoming ProMat conference to showcase its solution for applications within the manufacturing and supply chain industry. This event will be taking place on 20-23 March at McCormick Place, Chicago, USA.</p>
<p>ProMat is the largest international material handling, logistics and supply chain show and conference featuring over 1,000 leading solution providers, including Kudan. As robotics for intralogistic use cases will be one of the main interests for those attending, Kudan will be presenting both its camera-based and 3D Lidar-based “Simultaneous Localization and Mapping” (SLAM) technology, KdVisual and KdLidar. With the robustness of Kudan’s SLAM, customers can equip their robots with a positioning stack that is able to handle warehouse environments that are typically large and dynamic in nature. This provides companies with the flexibility of not needing to implement infrastructure-based positioning systems which can be costly and time consuming. Companies can quickly change the physical operational workflows of their robots or vehicles as their operations evolve, without needing to reinstall any infrastructure.</p>
<p>We will be demonstrating both single-stereo and multi-stereo visual SLAM (VSLAM) capabilities on our robots at the booth. Customers will be able to see first hand how certain use cases can benefit from having a multi-camera set up on their robots or vehicles. We will be also highlighting our recent <a href="https://www.kudan.io/blog/kdvisual-has-launched-as-part-of-the-latest-release-of-intel-edge-insight-for-amr-platform/" target="_blank" rel="noopener">partnership</a> with Intel in a robot running <a href="https://www.intel.com/content/www/us/en/docs/ei-for-amr/developer-guide/2022-3-1/kudan-visual-slam.html" target="_blank" rel="noopener">Intel’s Edge Insight for AMR</a> with Kudan’s KdVisual SLAM software. Intel’s EI for AMRs helps customers to cut down development time through its open, modular Software Development Kit (SDK). The included libraries and middleware allows code to be implemented and deployed to AMRs even if they have multiple hardware configurations.</p>
<p>Customers interested in 3D Lidar SLAM solutions will also be able to view demo robots equipped with Kudan’s KdLidar SLAM software. These robots will be using Kudan’s SLAM to both create a map and navigate themselves autonomously within the booth space. In collaboration with one of our Lidar partners, Hesai, we will be demonstrating our Lidar SLAM software on our robot using the QT128, a certified safety laser scanner that qualifies as an ASIL B product under ISO 26262 and can be used in AMR systems classified as ASIL A, ASIL B, and ASIL C. We will also be presenting two Lidar sensors from our partner Robosense &#8211; the ‘RS-Bpearl’, which has a unique 360°×90° super wide field-of-view (FoV) with less than 10cm blind-spot detection area, as well as the ‘RS-Helios’, which has a 70° ultra-wide vertical FoV, fulfilling both long-range perception and near-field blind spot detection needs.</p>
<p>Whether for indoor or outdoor, simple or complex environments, participants can visit Kudan’s booth to understand which sensor and SLAM system would best suit their intralogistic robotics applications. We look forward to discussing our SLAM solutions with you at ProMat 2023!</p>
<p><span style="text-decoration: underline;"><strong>Presentation details</strong></span></p>
<ul>
<li>Event: ProMat 2023 (<a href="https://www.promatshow.com/" target="_blank" rel="noopener">https://www.promatshow.com/</a>)</li>
<li>Date: Monday &#8211; Thursday, March 20 &#8211; 23, 2023</li>
<li>Location: McCormick Place, Chicago, USA</li>
<li>Booth: #N8229</li>
</ul>
<p><span style="text-decoration: underline;"><strong>About Kudan Inc.</strong></span><br />
Kudan is a deep tech research and development company specializing in algorithms for artificial perception (AP). As a complement to artificial intelligence (AI), AP functions allow machines to develop autonomy. Currently, Kudan is using its high-level technical innovation to explore business areas based on its own milestone models established for deep tech which provide wide-ranging impact on several major industrial fields.<br />
For more information, please refer to Kudan’s website at <a href="https://www.kudan.io/" target="_blank" rel="noopener noreferrer">https://www.kudan.io/</a>.</p>
<p>■Company Details<br />
Name: Kudan Inc.<br />
Securities Code: 4425 (TSE Growth)<br />
Representative: CEO Daiu Ko</p>
<p>■For more details, please contact us from <a href="https://www.kudan.io/contact" target="_blank" rel="noopener noreferrer">here</a>.</p><p>The post <a href="https://www.kudan.io/blog/promat-exhibit-2023/">ProMat 2023: Kudan to exhibit and present its visual and 3D Lidar SLAM solutions for intralogistics robotics use cases</a> first appeared on <a href="https://www.kudan.io">Kudan global</a>.</p>]]></content:encoded>
					
		
		
		<post-id xmlns="com-wordpress:feed-additions:1">1594</post-id>	</item>
	</channel>
</rss>
