Skip to content

Commit 31055ec

Browse files
Up to v1.4alt
1 parent 0858f22 commit 31055ec

File tree

12 files changed

+43
-41
lines changed

12 files changed

+43
-41
lines changed

MANIFEST.in

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,5 +4,5 @@ include LICENSE
44
include MANIFEST.in
55
include README.rst
66

7-
graft url_robots
8-
recursive-exclude url_robots *.pyc *.pyo
7+
graft robots
8+
recursive-exclude robots *.pyc *.pyo

README.rst

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
=========================
2-
django-url-robots
2+
django-robots
33
=========================
44

55
``Django`` ``robots.txt`` generator. Based on using decorated ``django.conf.urls.url``.
@@ -8,43 +8,43 @@ It gets ``urlpatterns`` and replaces ambiguous parts by ``*``.
88
Installation & Usage
99
=========================
1010

11-
The recommended way to install django-url-robots is with `pip <http://pypi.python.org/pypi/pip>`_
11+
The recommended way to install django-robots is with `pip <http://pypi.python.org/pypi/pip>`_
1212

1313
1. Install from PyPI with ``easy_install`` or ``pip``::
1414

15-
pip install django-url-robots
15+
pip install git+https://github.com/valeriansaliou/django-robots
1616

17-
2. Add ``'url_robots'`` to your ``INSTALLED_APPS``::
17+
2. Add ``'robots'`` to your ``INSTALLED_APPS``::
1818

1919
INSTALLED_APPS = (
2020
...
21-
'url_robots',
21+
'robots',
2222
...
2323
)
2424

25-
3. Add url_robots view to your root URLconf::
25+
3. Add robots view to your root URLconf::
2626

2727
urlpatterns += patterns('',
28-
url(r'^robots\.txt$', 'url_robots.views.robots_txt'),
28+
url(r'^robots\.txt$', 'robots.views.robots_txt'),
2929
)
3030

31-
4. Describe rules by boolean keyword argument ``robots_allow`` using for it ``url_robots.utils.url`` instead ``django.conf.urls.url``::
31+
4. Describe rules with the boolean keyword argument ``robots_allow``, using ``robots.utils.url`` instead of ``django.conf.urls.url``::
3232

33-
from url_robots.utils import url
33+
from robots.utils import url
3434
3535
urlpatterns += patterns('',
3636
url('^profile/private$', 'view', robots_allow=False),
3737
)
3838
39-
``django-url-robots`` tested with ``Django-1.3``. Encodes unicode characters by percent-encoding.
39+
``django-robots`` is tested with ``Django-1.3``. It encodes unicode characters by percent-encoding.
4040

4141
Settings
4242
====================
4343

4444
At the moment there is only one option, which defines the template of the ``robots.txt`` file::
4545

4646
urlpatterns += patterns('',
47-
url(r'^robots\.txt$', 'url_robots.views.robots_txt', {'template': 'my_awesome_robots_template.txt'}),
47+
url(r'^robots\.txt$', 'robots.views.robots_txt', {'template': 'my_awesome_robots_template.txt'}),
4848
)
4949

5050
Example
@@ -60,13 +60,13 @@ urls.py::
6060
from django.conf.urls import patterns, include
6161

6262
urlpatterns = patterns('',
63-
url(r'^profile', include('url_robots.tests.urls_profile')),
63+
url(r'^profile', include('robots.tests.urls_profile')),
6464
)
6565

6666
urls_profile.py::
6767

6868
from django.conf.urls import patterns
69-
from url_robots.utils import url
69+
from robots.utils import url
7070

7171
urlpatterns = patterns('',
7272
url(r'^s$', 'view', name='profiles', robots_allow=True),
File renamed without changes.
File renamed without changes.
File renamed without changes.

url_robots/tests/tests.py renamed to robots/tests/tests.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
# todo On Django 1.4 change testcase to django.test.SimpleTestCase
44
from django.utils import unittest
55

6-
from url_robots.utils import clean_pattern, join_patterns, create_rules
6+
from robots.utils import clean_pattern, join_patterns, create_rules
77

88

99
class CleanPatternTestCase(unittest.TestCase):
@@ -137,11 +137,11 @@ def test_1(self):
137137

138138
class CreateRulesTestCase(unittest.TestCase):
139139
def setUp(self):
140-
self.expected_rules = '''Allow: /profiles$ # /profiles$ name=profiles
141-
Disallow: /profile/*/private* # /profile/*/private* name=profile_private
142-
Allow: /profile/*/public* # /profile/*/public* name=profile_public'''
140+
self.expected_rules = '''Allow: /profiles$
141+
Disallow: /profile/*/private*
142+
Allow: /profile/*/public*'''
143143

144144
def test_create_rules_for_profiles(self):
145-
rules = create_rules('url_robots.tests.urls')
145+
rules = create_rules('robots.tests.urls')
146146
self.assertEqual(rules, self.expected_rules)
147147

robots/tests/urls.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
#coding=utf-8
2+
3+
from django.conf.urls import patterns, include
4+
5+
from robots.utils import url
6+
7+
8+
urlpatterns = patterns('',
9+
url(r'^profile', include('robots.tests.urls_profile')),
10+
)

url_robots/tests/urls_profile.py renamed to robots/tests/urls_profile.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from django.conf.urls import patterns
44

5-
from url_robots.utils import url
5+
from robots.utils import url
66

77
urlpatterns = patterns('',
88
url(r'^s$', 'view', name='profiles', robots_allow=True),
File renamed without changes.

url_robots/views.py renamed to robots/views.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from django.shortcuts import render
22

3-
from url_robots.utils import create_rules
3+
from robots.utils import create_rules
44

55

66
def robots_txt(request, template='robots.txt'):

setup.py

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,21 @@
1+
# -*- coding: utf-8 -*-
2+
13
from distutils.core import setup
24

35
long_description = open('README.rst').read()
46

57
setup(
6-
name='django-url-robots',
7-
version='1.3alt',
8+
name='django-robots',
9+
version='1.4alt',
810
description='Django robots.txt generator',
911
long_description=long_description,
10-
url='http://github.com/dimka665/django-url-robots',
11-
author='Dmitry Voronin',
12-
author_email='dimka665@gmail.com',
12+
url='https://github.com/valeriansaliou/django-robots',
13+
author='Valérian Saliou',
14+
author_email='valerian@valeriansaliou.name',
1315
license='Python Software Foundation License',
14-
packages=['url_robots', 'url_robots.tests'],
15-
package_data={'url_robots': ['templates/*.*']},
16-
platforms=["any"],
16+
packages=['robots', 'robots.tests'],
17+
package_data={'robots': ['templates/*.*']},
18+
platforms=['any'],
1719
classifiers=[
1820
'Development Status :: 5 - Production/Stable',
1921
'Intended Audience :: Developers',
@@ -23,5 +25,5 @@
2325
'Programming Language :: Python',
2426
'Environment :: Web Environment',
2527
'Framework :: Django',
26-
],
28+
],
2729
)

url_robots/tests/urls.py

Lines changed: 0 additions & 10 deletions
This file was deleted.

0 commit comments

Comments
 (0)