[{"data":1,"prerenderedAt":142},["ShallowReactive",2],{"/reference/media-analysis":3,"/reference/media-analysis-surround":133},{"id":4,"title":5,"body":6,"description":123,"extension":124,"links":125,"meta":126,"navigation":128,"path":129,"seo":130,"stem":131,"__hash__":132},"docs/2.reference/media-analysis.md","Media analysis",{"type":7,"value":8,"toc":115},"minimark",[9,12,19,22,25,28,41,44,49,57,64,73,77,83,86,94,98,104,107],[10,11],"capacities-believer-label",{},[13,14,15],"p",{},[16,17,18],"em",{},"Last Updated: April 2026",[13,20,21],{},"Media analysis lets Capacities understand your media files so you can search and work with them like any other object in your space.",[13,23,24],{},"At launch, media analysis is available for Capacities Believers with AI features enabled and only runs in spaces where AI is turned on. Analyses are triggered manually (for example by clicking Analyze on an image); if AI is disabled for a space, none of its media can be analyzed.",[13,26,27],{},"Media analysis:",[29,30,31,35,38],"ul",{},[32,33,34],"li",{},"Turns images and other media into rich, searchable metadata.",[32,36,37],{},"Powers smart title updates for images with generic titles (for example “image” or “screenshot”), while leaving meaningful titles you have written yourself unchanged.",[32,39,40],{},"Makes extracted metadata available as context for the AI assistant, instead of repeatedly sending the original media files.",[13,42,43],{},"After the Beta testing phase, media analysis will be available for Pro users with AI enabled.",[45,46,48],"h2",{"id":47},"image-analysis","Image analysis",[13,50,51,52,56],{},"Image analysis is the first media analysis feature and is available already. It lets you analyze an image ",[53,54,55],"strong",{},"once"," to extract a better title, description, text, categories, and colors, and stores all of that as metadata on the image object.",[13,58,59],{},[60,61],"img",{"alt":62,"src":63},"analyzed image","reference/media/images/image-analysis/analyzed-image-hero.png",[13,65,66,67,72],{},"For full details and examples, see the dedicated ",[68,69,71],"a",{"href":70},"/reference/basic-types/images#image-analysis","image analysis section",".",[45,74,76],{"id":75},"weblink-analysis","Weblink analysis",[13,78,79,80,72],{},"Weblink analysis is part of the media analysis roadmap and is ",[53,81,82],{},"coming soon",[13,84,85],{},"It will extract useful metadata and content snippets from weblink objects so you can:",[29,87,88,91],{},[32,89,90],{},"Search and filter based on the content of linked pages.",[32,92,93],{},"Use the extracted information as context for the AI assistant.",[45,95,97],{"id":96},"audio-analysis","Audio analysis",[13,99,100,101,103],{},"Audio analysis is also ",[53,102,82],{}," as part of media analysis.",[13,105,106],{},"It will make it possible to:",[29,108,109,112],{},[32,110,111],{},"Transcribe audio objects so the spoken words become searchable text.",[32,113,114],{},"Extract summaries and key points that the AI assistant can use as context.",{"title":116,"searchDepth":117,"depth":118,"links":119},"",1,2,[120,121,122],{"id":47,"depth":118,"text":48},{"id":75,"depth":118,"text":76},{"id":96,"depth":118,"text":97},"Use AI to extract searchable information from your images and other media.","md",null,{"image":127},"/reference/media/media-analysis.jpg",true,"/reference/media-analysis",{"title":5,"description":123},"2.reference/media-analysis","JFd8j-sTZ6B9LYAKQnpYkS_NuVtyXIoz99XzgLePFn8",[134,138],{"title":135,"path":136,"stem":137,"children":-1},"WhatsApp integration","/reference/integrations/whatsapp","2.reference/integrations/whatsapp",{"title":139,"path":140,"stem":141,"children":-1},"Mermaid Diagrams","/reference/mermaid-diagrams","2.reference/mermaid-diagrams",1775642815450]