Using Langflow APIs

  • Log in to the Langflow service
  • Create a flow
  • Give the flow an API endpoint name, e.g. my-api-endpoint
    • Click the flow name > Edit Details > Endpoint Name
  • Get an access token using the Practicus AI SDK
  • Note your LLM model token (e.g. OpenAI API key) as well; API calls do not use the tokens you saved in the Langflow UI
  • Make the API calls as shown below
service_url = "https://langflow.dev.practicus.io"
# The endpoint name below is defined in the Langflow UI:
# open a flow, then click the flow name > Edit Details > Endpoint Name.
endpoint_name = "my-api-endpoint"
assert service_url, "Please define service_url"
assert endpoint_name, "Please define endpoint_name"

api_url = f"{service_url}/api/v1/run/{endpoint_name}?stream=false"
print("API url:", api_url)
# e.g. https://langflow.dev.practicus.io/api/v1/run/api-test1?stream=false
import practicuscore as prt

region = prt.current_region()
token = None  # Get a new token, or reuse existing if not expired.
access_token = region.get_addon_session_token(key="langflow", token=token)
print("Access token for addon:", access_token)
open_ai_token = ""  # Your OpenAI API key, passed to the flow via tweaks below.
assert open_ai_token, "Please define open_ai_token"
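
Rather than hard-coding the key, you can read it from an environment variable. A minimal sketch, assuming the key is exported under the conventional OPENAI_API_KEY name (an assumption, not something the flow requires):

import os

# Assumption: the OpenAI key is exported as OPENAI_API_KEY in this environment.
open_ai_token = os.environ.get("OPENAI_API_KEY", "")
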
import requests

headers = {"Content-Type": "application/json", "Authorization": f"Bearer {access_token}"}

payload = {
    "input_value": "message",  # The chat message sent to the flow.
    "output_type": "chat",
    "input_type": "chat",
    # Tweak keys are component IDs specific to this flow; replace them with your own.
    "tweaks": {
        "ChatInput-MRIWj": {},
        "Prompt-KvhR7": {},
        "ChatOutput-CuWil": {},
        # The LLM API key is passed to the model component at request time.
        "OpenAIModel-dmT1W": {"api_key": open_ai_token},
    },
}

response = requests.post(api_url, headers=headers, json=payload)

print(response.status_code)
result = response.json()
print(result)
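
The chat message itself is nested inside the returned JSON. A minimal sketch for pulling it out, assuming the typical Langflow Chat Output response shape (the exact keys can vary between Langflow versions, so inspect result first if they don't match):

# Sketch only: the key path below is an assumption about the response layout.
try:
    message_text = result["outputs"][0]["outputs"][0]["results"]["message"]["text"]
    print("Chat output:", message_text)
except (KeyError, IndexError, TypeError):
    print("Unexpected response shape, inspect the full result printed above.")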
