(function () {
  // GTM custom HTML tag (ES5 required by Tag Manager): propagates the current
  // page's UTM parameters onto every link pointing at the app-store redirect
  // domain, so campaign attribution survives the cross-domain hop.
  var utmInheritingDomain = "appstore.com";
  // Strip any pre-existing utm_* params first. Value charset widened from
  // [A-Za-z0-9]+ to [^&#]* so percent-encoded / dashed / dotted values
  // (e.g. utm_campaign=spring%2Dsale) are removed in full, not left truncated.
  var utmRegExp = /(&|\?)utm_[A-Za-z]+=[^&#]*/gi;
  var links = document.getElementsByTagName("a");
  // {{...}} tokens are GTM variables, substituted when the container publishes.
  var utms = [
    "utm_medium={{URL - utm_medium}}",
    "utm_source={{URL - utm_source}}",
    "utm_campaign={{URL - utm_campaign}}"
  ];
  for (var index = 0; index < links.length; index += 1) {
    var tempLink = links[index].href; // .href getter yields the absolute URL
    var tempParts;
    if (tempLink.indexOf(utmInheritingDomain) > 0) {
      tempLink = tempLink.replace(utmRegExp, "");
      // Work on the pre-fragment part only so "#section" anchors survive.
      tempParts = tempLink.split("#");
      // BUG FIX: when the stripped utm_* param was the FIRST query param,
      // replace() consumed the "?" and left the remaining params glued on
      // with a dangling "&" (e.g. "page&foo=1"). Promote that first "&"
      // back to "?" so the URL stays valid and the branch below appends
      // the inherited UTMs with the correct separator.
      if (tempParts[0].indexOf("?") < 0) {
        var danglingAmp = tempParts[0].indexOf("&");
        if (danglingAmp >= 0) {
          tempParts[0] =
            tempParts[0].slice(0, danglingAmp) +
            "?" +
            tempParts[0].slice(danglingAmp + 1);
        }
      }
      if (tempParts[0].indexOf("?") < 0) {
        tempParts[0] += "?" + utms.join("&");
      } else {
        tempParts[0] += "&" + utms.join("&");
      }
      tempLink = tempParts.join("#");
    }
    links[index].href = tempLink;
  }
}());

Browse generative AI models
supported by Friendli Inference


HOW TO USE

Three ways to run generative AI models with Friendli Inference:

01

Friendli Dedicated Endpoints

Build and run generative AI models on autopilot

Learn more

02

Friendli Container

Serve LLM and LMM inferences with Friendli Inference in your GPU environment

Learn more

03

Friendli Serverless Endpoints

Call our fast and affordable API for open-source generative AI models

Learn more