Figma

Figma is a collaborative web application for interface design.

This notebook covers how to load data from the Figma REST API into a format that can be ingested into LangChain, along with example usage for code generation.
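
If you are starting from a fresh environment, you will likely need the LangChain and OpenAI packages installed first (a minimal setup sketch; pin versions as appropriate for your project):

# Assumed prerequisites for this notebook; adjust to your environment
%pip install --upgrade langchain openai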

import os

from langchain.chat_models import ChatOpenAI
from langchain.document_loaders.figma import FigmaFileLoader
from langchain.indexes import VectorstoreIndexCreator
from langchain.prompts.chat import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    SystemMessagePromptTemplate,
)

The Figma API requires an access token, node IDs, and a file key.

The file key can be pulled from the URL, e.g. https://www.figma.com/file/{filekey}/sampleFilename

Node IDs are also available in the URL. Click on anything and look for the ‘?node-id={node_id}’ param.

Access token instructions are in the Figma help center article: https://help.figma.com/hc/en-us/articles/8085703771159-Manage-personal-access-tokens
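
For example, the three values can be exported as environment variables before running the loader (a sketch; the variable names match the os.environ.get calls in the next cell, and the values shown are placeholders):

import os

# Placeholder credentials -- substitute your own values
os.environ["ACCESS_TOKEN"] = "<your-figma-personal-access-token>"
os.environ["NODE_IDS"] = "<node-id>"  # e.g. the value of the ?node-id= URL param
os.environ["FILE_KEY"] = "<your-file-key>"  # from https://www.figma.com/file/{filekey}/...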

figma_loader = FigmaFileLoader(
    os.environ.get("ACCESS_TOKEN"),
    os.environ.get("NODE_IDS"),
    os.environ.get("FILE_KEY"),
)
# see https://python.langchain.com/en/latest/modules/data_connection/getting_started.html for more details
index = VectorstoreIndexCreator().from_loaders([figma_loader])
figma_doc_retriever = index.vectorstore.as_retriever()
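
Before wiring the retriever into a prompt, it can be worth a quick sanity check that the index returns Figma nodes for a query (illustrative only; the query string is arbitrary):

# Example-only check that the vectorstore index returns relevant Figma nodes
docs = figma_doc_retriever.get_relevant_documents("page top header")
print(len(docs))
if docs:
    print(docs[0].page_content[:200])
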
def generate_code(human_input):
    # I have no idea if the Jon Carmack thing makes for better code. YMMV.
    # See https://python.langchain.com/en/latest/modules/models/chat/getting_started.html for chat info
    system_prompt_template = """You are expert coder Jon Carmack. Use the provided design context to create idiomatic HTML/CSS code based on the user request.
    Everything must be inline in one file and your response must be directly renderable by the browser.
    Figma file nodes and metadata: {context}"""

    human_prompt_template = "Code the {text}. Ensure it's mobile responsive"
    system_message_prompt = SystemMessagePromptTemplate.from_template(
        system_prompt_template
    )
    human_message_prompt = HumanMessagePromptTemplate.from_template(
        human_prompt_template
    )
    # delete the gpt-4 model_name to use the default gpt-3.5-turbo for faster results
    gpt_4 = ChatOpenAI(temperature=0.02, model_name="gpt-4")
    # Use the retriever's 'get_relevant_documents' method if needed to filter down longer docs
    relevant_nodes = figma_doc_retriever.get_relevant_documents(human_input)
    conversation = [system_message_prompt, human_message_prompt]
    chat_prompt = ChatPromptTemplate.from_messages(conversation)
    response = gpt_4(
        chat_prompt.format_prompt(
            context=relevant_nodes, text=human_input
        ).to_messages()
    )
    return response
response = generate_code("page top header")

Returns the following in response.content:

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <style>
        @import url('https://fonts.googleapis.com/css2?family=DM+Sans:wght@500;700&family=Inter:wght@600&display=swap');

        body {
            margin: 0;
            font-family: 'DM Sans', sans-serif;
        }

        .header {
            display: flex;
            justify-content: space-between;
            align-items: center;
            padding: 20px;
            background-color: #fff;
            box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
        }

        .header h1 {
            font-size: 16px;
            font-weight: 700;
            margin: 0;
        }

        .header nav {
            display: flex;
            align-items: center;
        }

        .header nav a {
            font-size: 14px;
            font-weight: 500;
            text-decoration: none;
            color: #000;
            margin-left: 20px;
        }

        @media (max-width: 768px) {
            .header nav {
                display: none;
            }
        }
    </style>
</head>
<body>
    <header class="header">
        <h1>Company Contact</h1>
        <nav>
            <a href="#">Lorem Ipsum</a>
            <a href="#">Lorem Ipsum</a>
            <a href="#">Lorem Ipsum</a>
        </nav>
    </header>
</body>
</html>
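
Because the prompt asks for everything inline in a single file, the generated markup can be written straight to disk and opened in a browser (a small convenience sketch; the filename is arbitrary):

# Save the generated HTML so it can be opened directly in a browser
with open("generated_header.html", "w") as f:
    f.write(response.content)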