Changes from all commits
25 commits
75f73f8
Minimal working setup
kelvinkipruto Apr 24, 2024
b92ef3e
Working version with DB
kelvinkipruto May 6, 2024
a4541de
Cleanup
kelvinkipruto May 6, 2024
aec1820
Run time improvements
kelvinkipruto May 6, 2024
8c0b06f
Remove unused imports
kelvinkipruto May 6, 2024
95dae7f
Merge branch 'main' of https://github.com/CodeForAfrica/api into ft/m…
kelvinkipruto May 6, 2024
9e17c89
Docker files
kelvinkipruto May 6, 2024
1469485
validate robots.txt
kelvinkipruto May 7, 2024
1e1c00d
Improve script to capture extra required fields
kelvinkipruto May 14, 2024
3140ecb
Rename to content_access_bot
kelvinkipruto May 14, 2024
906ba75
use case insensitivity when matching crawlers
kelvinkipruto May 17, 2024
e1dd2e4
Improve url redirects check
kelvinkipruto May 17, 2024
f74769b
Update list of crawlers
kelvinkipruto May 17, 2024
73a0031
use environs instead of dotenv
kelvinkipruto May 17, 2024
d8981e1
Misc improvements
kelvinkipruto May 17, 2024
883a8ab
Code changes
kelvinkipruto Oct 4, 2024
b551b3e
Working Update
kelvinkipruto Jun 13, 2025
09bc272
Refactor database imports to use sqliteDB module
kelvinkipruto Jun 17, 2025
f13a25c
Improve script reliability
kelvinkipruto Jun 17, 2025
782b921
Fix SQL table definition to allow NULL values for archived robots fields
kelvinkipruto Jun 18, 2025
a2761a5
Simplified working scrapper
kelvinkipruto Jun 19, 2025
a1d7374
Update interpreter constraints to include Python 3.10
kelvinkipruto Jun 19, 2025
df6e7a3
Enhance database connection timeout and improve robots fetching logic
kelvinkipruto Jun 24, 2025
b3352ff
refactor(db): implement site checks tracking system
kelvinkipruto Sep 5, 2025
7ab4278
Merge branch 'main' into ft/midiadata-init
kelvinkipruto Sep 5, 2025
3 changes: 3 additions & 0 deletions .gitignore
@@ -168,3 +168,6 @@ cython_debug/
# Custom gitignore
*.db
# End of custom ignore

*.csv
*.xlsx
6 changes: 6 additions & 0 deletions 3rdparty/py/requirements-all.txt
@@ -1,3 +1,5 @@
aiohttp==3.9.3
backoff==2.2.1
boto3==1.40.23
celery==5.5.3
dj-rest-auth==7.0.1
@@ -17,8 +19,12 @@ greenlet==3.2.4
gunicorn[gevent, setproctitle]==23.0.0
html2text==2025.4.15
lxml==6.0.1
openpyxl==3.1.5
pandas==2.3.0
pyairtable==2.3.3
redis==6.4.0
requests==2.32.5
scrapy==2.12.0
sentry-sdk==2.36.0
tablib[xlsx]==3.8.0
trafilatura==1.12.2
5 changes: 5 additions & 0 deletions content_access_bot/.env.example
@@ -0,0 +1,5 @@
AIRTABLE_BASE_ID=
AIRTABLE_API_KEY=
AIRTABLE_ORGANISATION_TABLE=
AIRTABLE_CONTENT_TABLE=
DB_FILE=content_access_bot.db
33 changes: 33 additions & 0 deletions content_access_bot/docker/BUILD
@@ -0,0 +1,33 @@
python_sources()
docker_image(
    name="content_access_bot-deps",
    image_tags=["deps"],
    build_platform=["linux/amd64", "linux/arm64"],
    registries=["content_access_bot"],
    repository="app",
    skip_push=True,
    source="Dockerfile.deps",
)

file(name="app.json", source="app.json")

docker_image(
    name="content_access_bot-srcs",
    image_tags=["srcs"],
    build_platform=["linux/amd64", "linux/arm64"],
    registries=["content_access_bot"],
    repository="app",
    skip_push=True,
    source="Dockerfile.srcs",
)

docker_image(
    name="content_access_bot",
    build_platform=["linux/amd64", "linux/arm64"],
    dependencies=[":content_access_bot-srcs", ":content_access_bot-deps", ":app.json"],
    image_tags=[
        "{build_args.VERSION}",
        "latest",
    ],
    source="Dockerfile",
)
11 changes: 11 additions & 0 deletions content_access_bot/docker/Dockerfile
@@ -0,0 +1,11 @@
FROM python:3.11-slim-bookworm AS python-base
FROM content_access_bot/app:deps AS app-deps
FROM content_access_bot/app:srcs AS app-srcs
FROM python-base AS python-app

WORKDIR /app
COPY content_access_bot/docker/app.json ./
COPY --from=app-deps /app ./
COPY --from=app-srcs /app ./

CMD ["tail", "-f", "/dev/null"]
4 changes: 4 additions & 0 deletions content_access_bot/docker/Dockerfile.deps
@@ -0,0 +1,4 @@
FROM python:3.11-slim-bookworm

COPY content_access_bot.py/content_access_bot-deps@environment=linux.pex /content_access_bot-deps.pex
RUN PEX_TOOLS=1 python /content_access_bot-deps.pex venv --scope=deps --compile /app
4 changes: 4 additions & 0 deletions content_access_bot/docker/Dockerfile.srcs
@@ -0,0 +1,4 @@
FROM python:3.11-slim-bookworm

COPY content_access_bot.py/content_access_bot-srcs@environment=linux.pex /content_access_bot-srcs.pex
RUN PEX_TOOLS=1 python /content_access_bot-srcs.pex venv --scope=srcs --compile /app
9 changes: 9 additions & 0 deletions content_access_bot/docker/app.json
@@ -0,0 +1,9 @@
{
  "name": "content_access_bot",
  "cron": [
    {
      "command": "./pex",
      "schedule": "@daily"
    }
  ]
}
46 changes: 46 additions & 0 deletions content_access_bot/py/BUILD
@@ -0,0 +1,46 @@
python_sources(
    name="lib",
    dependencies=[
        "3rdparty/py:requirements-all#aiohttp",
        "3rdparty/py:requirements-all#backoff",
        "3rdparty/py:requirements-all#environs",
        "3rdparty/py:requirements-all#pyairtable",
        "3rdparty/py:requirements-all#scrapy",
        "3rdparty/py:requirements-all#openpyxl",
        "3rdparty/py:requirements-all#pandas",
        "content_access_bot/py/pipeline.py:lib"
    ],
)

pex_binary(
    name="content_access_bot-deps",
    environment=parametrize("__local__", "linux"),
    dependencies=[
        ":lib",
    ],
    entry_point="main.py",
    include_sources=False,
    include_tools=True,
    layout="packed",
)

pex_binary(
    name="content_access_bot-srcs",
    environment=parametrize("__local__", "linux"),
    dependencies=[
        ":lib",
    ],
    entry_point="main.py",
    include_requirements=False,
    include_tools=True,
    layout="packed",
)


pex_binary(
    name="content_access_bot",
    dependencies=[
        ":lib",
    ],
    entry_point="main.py",
)
1 change: 1 addition & 0 deletions content_access_bot/py/VERSION
@@ -0,0 +1 @@
0.0.1
85 changes: 85 additions & 0 deletions content_access_bot/py/airtable.py
@@ -0,0 +1,85 @@
from pyairtable import Api
from utils import validate_url, clean_url
import os
import logging
import re
from environs import Env
env = Env()
dotenv_path = os.path.join(os.path.dirname(__file__), '..', '.env')

env.read_env(dotenv_path)


logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s - %(levelname)s - %(message)s')

api_key = os.getenv('AIRTABLE_API_KEY')
base_id = os.getenv('AIRTABLE_BASE_ID')
organisations_table = os.getenv('AIRTABLE_ORGANISATION_TABLE')
content_table = os.getenv('AIRTABLE_CONTENT_TABLE')

if not api_key or not base_id or not organisations_table or not content_table:
    raise ValueError('API key, base ID and Organisation table are required')

at = Api(api_key)
Comment on lines +2 to +24

[P0] Import pyairtable Api before use

The module instantiates Api(api_key) without importing the class, so importing airtable.py raises NameError: Api is not defined before any functionality can run. Add from pyairtable import Api (or the appropriate module) near the other imports.
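
A minimal sketch of the import block the comment is asking for, using only names that already appear in this diff (note that the first line of airtable.py shown above does include the pyairtable import, so the finding may already be addressed):

import os
import re
import logging

from environs import Env
from pyairtable import Api  # needed before the module-level `at = Api(api_key)` call
from utils import validate_url, clean_url  # local helper module from this PR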




def get_table_data(table_name, formula=None, fields=None):
    if not base_id:
        logging.error(f"AIRTABLE_BASE_ID Not Provided")
        return
    table = at.table(base_id, table_name)
    return table.all(formula=formula, fields=fields)


def get_formula(allowed_countries=None):
    base_formula = 'AND(NOT({Organisation Name} = ""), NOT({Website} = ""), NOT({HQ Country} = ""))'
    if allowed_countries:
        countries_formula = ', '.join(
            [f'({{HQ Country}} = "{country}")' for country in allowed_countries])
        formula = f'AND({base_formula}, OR({countries_formula}))'
    else:
        formula = base_formula
    return formula


def process_records(data):
    organizations = []
    for record in data:
        website = validate_url(record['fields'].get('Website', None))
        name = record['fields'].get('Organisation Name', None)
        country = record['fields'].get('HQ Country', None)
        id: str = record['id']
        if website:
            org = {}
            org['id'] = id
            org['name'] = re.sub(
                r'[\\/*?:"<>|]', '-', name) if name else None
            org['url'] = clean_url(website)
            org['country'] = country

            organizations.append(org)
    return organizations


def get_organizations(allowed_countries=None):
    logging.info('Fetching organizations from Airtable')
    formula = get_formula(allowed_countries)
    fields = ['Organisation Name', 'Website', 'HQ Country']
    data = get_table_data(organisations_table, formula, fields)
    organizations = process_records(data)
    logging.info(f'Fetched {len(organizations)} organizations')
    return organizations


async def batch_upsert_organizations(data):
    logging.info('Upserting organizations in Airtable')
    try:
        if not base_id or not content_table:
            logging.error(f"AIRTABLE_BASE_ID or AIRTABLE_CONTENT_TABLE Not Provided")
            return
        table = at.table(base_id, content_table)
        table.batch_upsert(records=data, key_fields=['id',])
        logging.info('Organizations upserted successfully')
    except Exception as e:
        logging.error(f'Error upserting organization: {e}')