Merged
Commits
24 commits
5ebe708
BE-1: Alembic migration to drop unique constraint for multiple connec…
CREDO23 Jan 6, 2026
9f75a3f
BE-1: Add connector_naming.py utilities for friendly auto-naming and …
CREDO23 Jan 6, 2026
21d45b8
BE-1: Allow multiple connectors of same type per search space (remove…
CREDO23 Jan 6, 2026
d7b8890
BE-2: Remove duplicate checks and auto-generate user-friendly names f…
CREDO23 Jan 6, 2026
7900d6a
BE-2: Remove duplicate checks and enable auto-generation of user-frie…
CREDO23 Jan 6, 2026
c58a3fb
BE-2: Remove duplicate logic and enable auto-friendly naming for Line…
CREDO23 Jan 6, 2026
d75df7e
BE-2: Remove duplicate-check logic and enable user-friendly auto-nami…
CREDO23 Jan 6, 2026
d979c15
BE-2: Enforce unique connector names per user and search space (idemp…
CREDO23 Jan 6, 2026
4c6a782
feat: add extract_identifier_from_credentials to connector naming
CREDO23 Jan 7, 2026
932222b
feat: add fetch_google_user_email and update Google OAuth routes
CREDO23 Jan 7, 2026
d03b8da
feat: add Linear org name fetch and update route
CREDO23 Jan 7, 2026
42397f1
feat: add Airtable user email fetch and update route
CREDO23 Jan 7, 2026
0ba64fe
feat: update OAuth routes to use async connector naming
CREDO23 Jan 7, 2026
93c7b83
feat: show identifier-only display names in connector cards
CREDO23 Jan 7, 2026
755f923
fix: connector card and edit view styling
CREDO23 Jan 7, 2026
2508b37
feat: add connector accounts list view for OAuth connectors with mult…
CREDO23 Jan 7, 2026
9ad1348
feat: add connectorId support for multi-account OAuth connectors
CREDO23 Jan 7, 2026
3ff87a2
feat: improve connector popup with grouped OAuth connectors
CREDO23 Jan 7, 2026
4b3d427
feat: prevent duplicate OAuth account connections
CREDO23 Jan 7, 2026
f1a715e
refactor: move Linear OAuth utils to connector, use httpx.AsyncClient
CREDO23 Jan 7, 2026
5f0013c
fix: restore duplicate check for non-OAuth connectors
CREDO23 Jan 7, 2026
4de2815
fix: connector card UI improvements
CREDO23 Jan 7, 2026
b664547
style: format web codebase
CREDO23 Jan 7, 2026
9841bdd
style: format backend with ruff
CREDO23 Jan 7, 2026
@@ -0,0 +1,55 @@
"""Allow multiple connectors of same type per search space

Revision ID: 57
Revises: 56
Create Date: 2026-01-06 12:00:00.000000

"""

from collections.abc import Sequence

from alembic import op
from sqlalchemy import text

# revision identifiers, used by Alembic.
revision: str = "57"
down_revision: str | None = "56"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None


def upgrade() -> None:
connection = op.get_bind()
constraint_exists = connection.execute(
text("""
SELECT 1 FROM information_schema.table_constraints
WHERE table_name='search_source_connectors'
AND constraint_type='UNIQUE'
AND constraint_name='uq_searchspace_user_connector_type'
""")
).scalar()
if constraint_exists:
op.drop_constraint(
"uq_searchspace_user_connector_type",
"search_source_connectors",
type_="unique",
)


def downgrade() -> None:
connection = op.get_bind()
constraint_exists = connection.execute(
text("""
SELECT 1 FROM information_schema.table_constraints
WHERE table_name='search_source_connectors'
AND constraint_type='UNIQUE'
AND constraint_name='uq_searchspace_user_connector_type'
""")
).scalar()
if not constraint_exists:
op.create_unique_constraint(
"uq_searchspace_user_connector_type",
"search_source_connectors",
["search_space_id", "user_id", "connector_type"],
)
@@ -0,0 +1,55 @@
"""
Add unique constraint for (search_space_id, user_id, name) on search_source_connectors.

Revision ID: 58
Revises: 57
Create Date: 2026-01-06 14:00:00.000000

"""

from collections.abc import Sequence

from alembic import op
from sqlalchemy import text

revision: str = "58"
down_revision: str | None = "57"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None


def upgrade() -> None:
connection = op.get_bind()
constraint_exists = connection.execute(
text("""
SELECT 1 FROM information_schema.table_constraints
WHERE table_name='search_source_connectors'
AND constraint_type='UNIQUE'
AND constraint_name='uq_searchspace_user_connector_name'
""")
).scalar()
if not constraint_exists:
op.create_unique_constraint(
"uq_searchspace_user_connector_name",
"search_source_connectors",
["search_space_id", "user_id", "name"],
)


def downgrade() -> None:
connection = op.get_bind()
constraint_exists = connection.execute(
text("""
SELECT 1 FROM information_schema.table_constraints
WHERE table_name='search_source_connectors'
AND constraint_type='UNIQUE'
AND constraint_name='uq_searchspace_user_connector_name'
""")
).scalar()
if constraint_exists:
op.drop_constraint(
"uq_searchspace_user_connector_name",
"search_source_connectors",
type_="unique",
)
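
Both migrations above repeat the same information_schema lookup to stay idempotent. A minimal sketch of how that check could be factored into a shared helper — the helper name and its placement are illustrative assumptions, not part of this PR:

from alembic import op
from sqlalchemy import text


def unique_constraint_exists(table_name: str, constraint_name: str) -> bool:
    """Return True if a UNIQUE constraint with this name exists on the table."""
    connection = op.get_bind()
    row = connection.execute(
        text(
            """
            SELECT 1 FROM information_schema.table_constraints
            WHERE table_name = :table_name
              AND constraint_type = 'UNIQUE'
              AND constraint_name = :constraint_name
            """
        ),
        {"table_name": table_name, "constraint_name": constraint_name},
    ).scalar()
    return row is not None
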
40 changes: 40 additions & 0 deletions surfsense_backend/app/connectors/airtable_connector.py
@@ -382,3 +382,43 @@ def format_record_to_markdown(
markdown_parts.append("")

return "\n".join(markdown_parts)


# --- OAuth User Info ---

AIRTABLE_WHOAMI_URL = "https://api.airtable.com/v0/meta/whoami"


async def fetch_airtable_user_email(access_token: str) -> str | None:
"""
Fetch user email from Airtable whoami API.

Args:
access_token: The Airtable OAuth access token

Returns:
User's email address or None if fetch fails
"""
try:
async with httpx.AsyncClient() as client:
response = await client.get(
AIRTABLE_WHOAMI_URL,
headers={"Authorization": f"Bearer {access_token}"},
timeout=10.0,
)

if response.status_code == 200:
data = response.json()
email = data.get("email")
if email:
logger.debug(f"Fetched Airtable user email: {email}")
return email

logger.warning(
f"Failed to fetch Airtable user info: {response.status_code}"
)
return None

except Exception as e:
logger.warning(f"Error fetching Airtable user email: {e!s}")
return None
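
A minimal usage sketch for fetch_airtable_user_email, assuming the caller already holds a decrypted Airtable OAuth access token (the token value below is a placeholder):

import asyncio

from app.connectors.airtable_connector import fetch_airtable_user_email


async def main() -> None:
    # Placeholder token; in the routes below it comes from the OAuth callback.
    email = await fetch_airtable_user_email("decrypted-airtable-access-token")
    print(email or "Could not resolve Airtable account email")


asyncio.run(main())
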
29 changes: 29 additions & 0 deletions surfsense_backend/app/connectors/google_gmail_connector.py
@@ -6,6 +6,7 @@

import base64
import json
import logging
import re
from typing import Any

@@ -21,6 +22,34 @@
SearchSourceConnectorType,
)

logger = logging.getLogger(__name__)


def fetch_google_user_email(credentials: Credentials) -> str | None:
"""
Fetch user email from Gmail API using Google credentials.

Uses the Gmail users.getProfile endpoint which returns the authenticated
user's email address.

Args:
credentials: Google OAuth Credentials object (not encrypted)

Returns:
User's email address or None if fetch fails
"""
try:
service = build("gmail", "v1", credentials=credentials)
profile = service.users().getProfile(userId="me").execute()
email = profile.get("emailAddress")
if email:
logger.debug(f"Fetched Google user email: {email}")
return email
return None
except Exception as e:
logger.warning(f"Error fetching Google user email: {e!s}")
return None


class GoogleGmailConnector:
"""Class for retrieving emails from Gmail using Google OAuth credentials."""
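
A minimal usage sketch for fetch_google_user_email, assuming the stored connector config has already been decrypted into plain OAuth fields (all field values below are placeholders):

from google.oauth2.credentials import Credentials

from app.connectors.google_gmail_connector import fetch_google_user_email

# Placeholder values; in the routes these come from the decrypted connector config.
credentials = Credentials(
    token="decrypted-access-token",
    refresh_token="decrypted-refresh-token",
    token_uri="https://oauth2.googleapis.com/token",
    client_id="client-id.apps.googleusercontent.com",
    client_secret="client-secret",
)

email = fetch_google_user_email(credentials)
print(email or "Could not resolve Gmail account email")
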
52 changes: 51 additions & 1 deletion surfsense_backend/app/connectors/linear_connector.py
@@ -9,18 +9,65 @@
from datetime import datetime
from typing import Any

import httpx
import requests
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select

from app.config import config
from app.db import SearchSourceConnector
from app.routes.linear_add_connector_route import refresh_linear_token
from app.schemas.linear_auth_credentials import LinearAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption

logger = logging.getLogger(__name__)

LINEAR_GRAPHQL_URL = "https://api.linear.app/graphql"

ORGANIZATION_QUERY = """
query {
organization {
name
}
}
"""


async def fetch_linear_organization_name(access_token: str) -> str | None:
"""
Fetch organization/workspace name from Linear GraphQL API.

Args:
access_token: The Linear OAuth access token

Returns:
Organization name or None if fetch fails
"""
try:
async with httpx.AsyncClient() as client:
response = await client.post(
LINEAR_GRAPHQL_URL,
headers={
"Authorization": access_token,
"Content-Type": "application/json",
},
json={"query": ORGANIZATION_QUERY},
timeout=10.0,
)

if response.status_code == 200:
data = response.json()
org_name = data.get("data", {}).get("organization", {}).get("name")
if org_name:
logger.debug(f"Fetched Linear organization name: {org_name}")
return org_name

logger.warning(f"Failed to fetch Linear org info: {response.status_code}")
return None

except Exception as e:
logger.warning(f"Error fetching Linear organization name: {e!s}")
return None


class LinearConnector:
"""Class for retrieving issues and comments from Linear."""
Expand Down Expand Up @@ -121,6 +168,9 @@ async def _get_valid_token(self) -> str:
f"Connector {self._connector_id} not found; cannot refresh token."
)

# Lazy import to avoid circular dependency
from app.routes.linear_add_connector_route import refresh_linear_token

# Refresh token
connector = await refresh_linear_token(self._session, connector)

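
A minimal usage sketch for fetch_linear_organization_name (the token value is a placeholder):

import asyncio

from app.connectors.linear_connector import fetch_linear_organization_name


async def main() -> None:
    # Placeholder token. Note the helper sends it as-is in the Authorization
    # header, without a "Bearer " prefix, matching the implementation above.
    org_name = await fetch_linear_organization_name("decrypted-linear-access-token")
    print(org_name or "Could not resolve Linear workspace name")


asyncio.run(main())
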
76 changes: 43 additions & 33 deletions surfsense_backend/app/routes/airtable_add_connector_route.py
@@ -11,9 +11,9 @@
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select

from app.config import config
from app.connectors.airtable_connector import fetch_airtable_user_email
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
@@ -22,6 +22,10 @@
)
from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption

logger = logging.getLogger(__name__)
@@ -275,6 +279,8 @@ async def airtable_callback(
status_code=400, detail="No access token received from Airtable"
)

user_email = await fetch_airtable_user_email(access_token)

# Calculate expiration time (UTC, tz-aware)
expires_at = None
if token_json.get("expires_in"):
@@ -297,48 +303,52 @@
credentials_dict = credentials.to_dict()
credentials_dict["_token_encrypted"] = True

# Check if connector already exists for this search space and user
existing_connector_result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.AIRTABLE_CONNECTOR,
)
# Check for duplicate connector (same account already connected)
is_duplicate = await check_duplicate_connector(
session,
SearchSourceConnectorType.AIRTABLE_CONNECTOR,
space_id,
user_id,
user_email,
)
existing_connector = existing_connector_result.scalars().first()

if existing_connector:
# Update existing connector
existing_connector.config = credentials_dict
existing_connector.name = "Airtable Connector"
existing_connector.is_indexable = True
logger.info(
f"Updated existing Airtable connector for user {user_id} in space {space_id}"
if is_duplicate:
logger.warning(
f"Duplicate Airtable connector detected for user {user_id} with email {user_email}"
)
else:
# Create new connector
new_connector = SearchSourceConnector(
name="Airtable Connector",
connector_type=SearchSourceConnectorType.AIRTABLE_CONNECTOR,
is_indexable=True,
config=credentials_dict,
search_space_id=space_id,
user_id=user_id,
)
session.add(new_connector)
logger.info(
f"Created new Airtable connector for user {user_id} in space {space_id}"
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=airtable-connector"
)

# Generate a unique, user-friendly connector name
connector_name = await generate_unique_connector_name(
session,
SearchSourceConnectorType.AIRTABLE_CONNECTOR,
space_id,
user_id,
user_email,
)
# Create new connector
new_connector = SearchSourceConnector(
name=connector_name,
connector_type=SearchSourceConnectorType.AIRTABLE_CONNECTOR,
is_indexable=True,
config=credentials_dict,
search_space_id=space_id,
user_id=user_id,
)
session.add(new_connector)
logger.info(
f"Created new Airtable connector for user {user_id} in space {space_id}"
)

try:
await session.commit()
logger.info(f"Successfully saved Airtable connector for user {user_id}")

# Redirect to the frontend with success params for indexing config
# Using query params to auto-open the popup with config view on new-chat page
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=airtable-connector"
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=airtable-connector&connectorId={new_connector.id}"
)

except ValidationError as e:
@@ -350,7 +360,7 @@ async def airtable_callback(
await session.rollback()
raise HTTPException(
status_code=409,
detail=f"Integrity error: A connector with this type already exists. {e!s}",
detail=f"Database integrity error: {e!s}",
) from e
except Exception as e:
logger.error(f"Failed to create search source connector: {e!s}")
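
The callback relies on check_duplicate_connector and generate_unique_connector_name from app.utils.connector_naming, which are not shown in this section. A rough sketch of the naming behavior implied by the call sites and by migration 58's unique (search_space_id, user_id, name) constraint — the signature is inferred from this route and the body is an illustrative assumption, not the PR's actual implementation:

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from app.db import SearchSourceConnector, SearchSourceConnectorType


async def generate_unique_connector_name(
    session: AsyncSession,
    connector_type: SearchSourceConnectorType,
    search_space_id: int,
    user_id,
    identifier: str | None,
) -> str:
    """Illustrative sketch: build a friendly name and suffix it until unique."""
    # Assumed display format; the real helper may label connectors differently.
    base = identifier or connector_type.value.replace("_", " ").title()
    candidate, suffix = base, 2
    while True:
        result = await session.execute(
            select(SearchSourceConnector.id).filter(
                SearchSourceConnector.search_space_id == search_space_id,
                SearchSourceConnector.user_id == user_id,
                SearchSourceConnector.name == candidate,
            )
        )
        if result.scalars().first() is None:
            return candidate
        candidate = f"{base} ({suffix})"
        suffix += 1
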