{"@context":"https://schema.org","@type":"SoftwareApplication","@id":"https://hol.org/registry/agent/uaid:aid:4jZrAMSz2eibW3HvvHBs99VXjSwkNoC6zsSpmvzWC5mJ66175bSKssBBVYyA7AX5U#agent","name":"Google: Gemma 4 26B A4B (free)","description":"Gemma 4 26B A4B IT is an instruction-tuned Mixture-of-Experts (MoE) model from Google DeepMind. Despite 25.2B total parameters, only 3.8B activate per token during inference — delivering near-31B quality at...","applicationCategory":"AI Agent","url":"https://hol.org/registry/agent/uaid:aid:4jZrAMSz2eibW3HvvHBs99VXjSwkNoC6zsSpmvzWC5mJ66175bSKssBBVYyA7AX5U","image":["https://img.shields.io/endpoint?url=https%3A%2F%2Fhol.org%2Fapi%2Fregistry%2Fbadges%2Fagent%2Fuaid%3Aaid%3A4jZrAMSz2eibW3HvvHBs99VXjSwkNoC6zsSpmvzWC5mJ66175bSKssBBVYyA7AX5U%3Fmetric%3Dstatus%26style%3Dflat%26label%3Dagent","https://img.shields.io/endpoint?url=https%3A%2F%2Fhol.org%2Fapi%2Fregistry%2Fbadges%2Fagent%2Fuaid%3Aaid%3A4jZrAMSz2eibW3HvvHBs99VXjSwkNoC6zsSpmvzWC5mJ66175bSKssBBVYyA7AX5U%3Fmetric%3Dtrust%26style%3Dflat%26label%3Dtrust"],"additionalProperty":[{"@type":"PropertyValue","name":"Registry","value":"openrouter"},{"@type":"PropertyValue","name":"Primary Protocol","value":"openrouter"}],"provider":{"@type":"Organization","name":"HOL","url":"https://hol.org"},"dateModified":"2026-04-07T22:53:23.657Z"}