<?xml version="1.0" encoding="UTF-8"?>
<!-- oEmbed "rich" response (https://oembed.com/).
     The spec requires width/height to be integer pixel counts, not CSS
     lengths like "560px" — fixed here and in the embedded iframe HTML. -->
<oembed>
  <version>1.0</version>
  <type>rich</type>
  <!-- Fixed: were "560px"/"140px"; oEmbed consumers expect plain integers. -->
  <width>560</width>
  <height>140</height>
  <!-- Fixed double-escaping: "&amp;#8211;" decoded to the literal text
       "&#8211;" in consumers; replaced with the intended en dash character. -->
  <title>Ep. 5 – LLMs on Raspberry Pi: Optimized for High-Throughput - This document explores optimizing Large Language Models (LLMs) for resource-constrained edge devices like the Raspberry Pi, aiming to overcome challenges in computational efficiency, power consumption, and response latency.</title>
  <url>https://hockeymikey.mn/2025/06/22/ep-5-llms-on-raspberry-pi-optimized-for-high-throughput/</url>
  <author_name>Mikey's research podcast - A really nice podcast generated using AI and love</author_name>
  <author_url>https://hockeymikey.mn</author_url>
  <!-- Fixed scheme: https (was http) for consistency with every other URL in
       this response and to avoid mixed-content blocking on HTTPS pages.
       NOTE(review): the oEmbed spec also requires thumbnail_width and
       thumbnail_height whenever thumbnail_url is present — the image's actual
       dimensions are not visible here; confirm and add them server-side. -->
  <thumbnail_url>https://hockeymikey.mn/wp-content/uploads/2025/06/M_Logo_beta-scaled.png</thumbnail_url>
  <!-- HTML iframe width/height attributes take integers only (no "px"). -->
  <html>&lt;iframe width="560" height="140" src="https://hockeymikey.mn/2025/06/22/ep-5-llms-on-raspberry-pi-optimized-for-high-throughput/?standalonePlayer"&gt;&lt;/iframe&gt;</html>
</oembed>