Introduction

The Friendli Python SDK provides a powerful and flexible way to interact with FriendliAI services, including Serverless Endpoints, Dedicated Endpoints, and Container. This allows developers to easily integrate their Python applications with FriendliAI.

Installation

The SDK can be installed with either pip or poetry:

# Using pip
pip install friendli

# Using poetry
poetry add friendli

Authentication

Authentication is done using a Friendli Token, which can be generated from the Friendli Suite in your Personal Settings:

import os
from friendli import SyncFriendli

# Authenticate with a Friendli Token read from the environment.
# Using the client as a context manager ensures the underlying HTTP
# resources are released on exit.
with SyncFriendli(
    token=os.environ["FRIENDLI_TOKEN"],
) as friendli:
    # Your code here
    pass  # required: a `with` body may not consist of a comment alone
For detailed instructions on generating a Friendli Token, see the Personal Access Tokens guide.

Chat Completions

The SDK supports chat completions across all deployment types. Choose the deployment option that best fits your needs.

import os
from friendli import SyncFriendli

# Synchronous chat completion against a Serverless endpoint.
conversation = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

with SyncFriendli(
    token=os.environ["FRIENDLI_TOKEN"],
) as friendli:
    completion = friendli.serverless.chat.complete(
        messages=conversation,
        model="meta-llama-3.1-8b-instruct",
        max_tokens=200,
    )
    print(completion)

Asynchronous Chat Completions

import asyncio
import os
from friendli import AsyncFriendli

async def main():
    # Same request as the synchronous example, issued through the async client.
    conversation = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
    ]
    async with AsyncFriendli(
        token=os.environ["FRIENDLI_TOKEN"],
    ) as friendli:
        completion = await friendli.serverless.chat.complete(
            messages=conversation,
            model="meta-llama-3.1-8b-instruct",
            max_tokens=200,
        )
        print(completion)

asyncio.run(main())

Tool-Assisted Chat Completions

Tool-assisted chat completions are only available for Serverless endpoints.

import os
from friendli import SyncFriendli

# Tool-assisted chat completion (Serverless endpoints only): the request
# declares a calculator tool the model may use to answer the question.
with SyncFriendli(
    token=os.environ["FRIENDLI_TOKEN"],
) as friendli:
    result = friendli.serverless.tool_assisted_chat.complete(
        messages=[{"role": "user", "content": "What is 3 + 6?"}],
        model="meta-llama-3.1-8b-instruct",
        max_tokens=200,
        tools=[{"type": "math:calculator"}],
    )
    print(result)

Advanced Features

Streaming Responses

The SDK supports streaming responses using server-sent events, which can be consumed using a simple for loop:

import os
from friendli import SyncFriendli

with SyncFriendli(
    token=os.environ["FRIENDLI_TOKEN"],
) as friendli:
    # Request a server-sent-events stream instead of a single response.
    stream = friendli.serverless.chat.stream(
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Hello!"},
        ],
        model="meta-llama-3.1-8b-instruct",
        max_tokens=200,
    )

    # The result is a context manager; entering it yields an iterator of events.
    with stream as events:
        for event in events:
            # Process each chunk as it arrives
            print(event, flush=True)

Custom Retry Strategy

You can customize retry behavior for operations that support retries:

import os
from friendli import SyncFriendli
from friendli.utils import BackoffStrategy, RetryConfig

with SyncFriendli(
    token=os.environ["FRIENDLI_TOKEN"],
) as friendli:
    res = friendli.serverless.chat.complete(
        messages=[
            {
                "content": "You are a helpful assistant.",
                "role": "system",
            },
            {
                "content": "Hello!",
                "role": "user",
            },
        ],
        model="meta-llama-3.1-8b-instruct",
        max_tokens=200,
        # Per-call retry override using the "backoff" strategy.
        # NOTE(review): BackoffStrategy(1, 50, 1.1, 100) — presumably
        # (initial interval, max interval, exponent, max elapsed time) and the
        # trailing False presumably disables retrying connection errors;
        # verify against the friendli.utils API docs.
        retries=RetryConfig("backoff", BackoffStrategy(1, 50, 1.1, 100), False),
    )

    # Handle response
    print(res)

Error Handling

The SDK provides comprehensive error handling with detailed exception information:

import os
from friendli import SyncFriendli, models

with SyncFriendli(
    token=os.environ["FRIENDLI_TOKEN"],
) as friendli:
    try:
        # Placeholder arguments ("<value>", "<id>") must be replaced with
        # real identifiers before this call can succeed.
        res = friendli.dedicated.endpoint.create(
            advanced={
                "tokenizer_add_special_tokens": True,
                "tokenizer_skip_special_tokens": False,
            },
            hf_model_repo="<value>",
            instance_option_id="<id>",
            name="<value>",
            project_id="<id>",
        )

        # Handle response
        print(res)

    except models.HTTPValidationError as e:
        # Handle validation errors (request rejected by input validation)
        print(f"Validation error: {e.data}")
    except models.SDKError as e:
        # Handle general SDK errors (non-2xx responses and the like)
        print(f"Error {e.status_code}: {e.message}")

Custom Logging

You can pass your own logger to the client class to help troubleshoot and diagnose issues during API interactions. This is especially useful when you encounter unexpected behavior or errors.

import logging
import os

from friendli import SyncFriendli

# Configure your custom logger, for example:
logger = logging.getLogger(__name__)
logging.basicConfig(
    format="[%(filename)s:%(lineno)s - %(funcName)s()] %(message)s",
    level=logging.INFO,
    handlers=[logging.StreamHandler()],
)

# Read the token from the environment, as in the other examples.
# (The original snippet referenced undefined SERVER_URL/TOKEN names,
# which would raise NameError when run.)
with SyncFriendli(
    token=os.environ["FRIENDLI_TOKEN"],
    debug_logger=logger,  # Pass your logger here
) as friendli:
    # Your code here
    pass

Beta Features

Dataset Management (Beta)

Our SDK provides a straightforward way to create, retrieve, and update datasets within your projects. Datasets can contain samples across various modalities—such as text, images, and more—allowing flexible and comprehensive dataset construction for your fine-tuning and validation workflows.

import os

from friendli.friendli import SyncFriendli
from friendli.models import Sample

# Required configuration, read from the environment.
TEAM_ID = os.environ["FRIENDLI_TEAM_ID"]
PROJECT_ID = os.environ["FRIENDLI_PROJECT_ID"]
TOKEN = os.environ["FRIENDLI_TOKEN"]


with SyncFriendli(
    token=TOKEN,
    x_friendli_team=TEAM_ID,
) as friendli:
    # Create dataset — returned as a context manager (presumably finalizing
    # the dataset on exit; verify against the SDK docs)
    with friendli.dataset.create(
        modality=["TEXT", "IMAGE"],
        name="test-create-dataset-sync",
        project_id=PROJECT_ID,
    ) as dataset:
        # Read dataset: one JSON-encoded Sample per line (JSON Lines format)
        with open("dataset.jsonl", "rb") as f:
            data = [Sample.model_validate_json(line) for line in f]

        # Add samples to dataset under the "train" split
        dataset.upload_samples(
            samples=data,
            split="train",
        )

File Management (Beta)

You can download and upload files to and from our database. This feature is primarily designed for storing sample files related to datasets, with additional use cases planned for the future.

import io
import os
from hashlib import sha256

import httpx

from friendli import SyncFriendli

# Required configuration, read from the environment.
TEAM_ID = os.environ["FRIENDLI_TEAM_ID"]
PROJECT_ID = os.environ["FRIENDLI_PROJECT_ID"]
TOKEN = os.environ["FRIENDLI_TOKEN"]


with SyncFriendli(
    token=TOKEN,
) as friendli:
    # Read data from file
    with open("lorem.txt", "rb") as f:
        data = f.read()

    # Initiate upload: register name, size, and sha256 digest, and receive
    # an upload destination.
    init_upload_res = friendli.file.init_upload(
        digest=f"sha256:{sha256(data).hexdigest()}",
        name="lorem.txt",
        project_id=PROJECT_ID,
        size=len(data),
        x_friendli_team=TEAM_ID,
    )

    # Upload to S3 via multipart POST. NOTE(review): upload_url may be None —
    # presumably when the content already exists server-side; verify.
    if init_upload_res.upload_url is not None:
        httpx.post(
            url=init_upload_res.upload_url,
            data=init_upload_res.aws,
            files={"file": io.BytesIO(data)},
            timeout=60,
        ).raise_for_status()

    # Complete upload so the service records the file as available
    friendli.file.complete_upload(
        file_id=init_upload_res.file_id,
        x_friendli_team=TEAM_ID,
    )

    # Get download URL for the file just uploaded
    get_download_url_res = friendli.file.get_download_url(
        file_id=init_upload_res.file_id,
        x_friendli_team=TEAM_ID,
    )
    print(get_download_url_res.download_url)

Further Resources

For complete API documentation, advanced usage examples, and detailed reference information, please visit the Friendli Python SDK GitHub repository.