Commit f585e2c

'robots_disallow' argument removed
1 parent cff8697 commit f585e2c

5 files changed: +15 -11 lines

5 files changed

+15
-11
lines changed

CHANGELOG

Lines changed: 4 additions & 0 deletions
@@ -1,4 +1,8 @@
 
+1.1 - December 2012
+=====================
+- remove 'robots_disallow' argument
+
 1.0 - December 2012
 =====================
 - initial working release
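
For projects upgrading from 1.0, the change is mechanical: every ``robots_disallow=True`` keyword becomes ``robots_allow=False``. A minimal before/after sketch (``'view'`` is the README's placeholder target, not a real view):

    # django-url-robots <= 1.0 (argument removed in this commit)
    url('^profile/private$', 'view', robots_disallow=True)

    # django-url-robots 1.1
    url('^profile/private$', 'view', robots_allow=False)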

README.rst

Lines changed: 3 additions & 3 deletions
@@ -28,12 +28,12 @@ The recommended way to install django-url-robots is with `pip <http://pypi.pytho
         url(r'^robots.txt$', 'url_robots.views.robots_txt'),
     )
 
-4. Describe rules by boolean keyword arguments ``robots_allow`` and ``robots_disallow`` using for it ``url_robots.utils.url`` instead ``django.conf.urls.defaults.url``::
+4. Describe rules by boolean keyword argument ``robots_allow`` using for it ``url_robots.utils.url`` instead ``django.conf.urls.defaults.url``::
 
     from url_robots.utils import url
 
     urlpatterns += patterns('',
-        url('^profile/private$', 'view', robots_disallow=True),
+        url('^profile/private$', 'view', robots_allow=False),
     )
 
 ``django-url-robots`` tested with ``Django-1.3``. Encodes unicode characters by percent-encoding.

@@ -71,7 +71,7 @@ urls_profile.py::
     urlpatterns = patterns('',
         url(r'^s$', 'view', name='profiles', robots_allow=True),
         url(r'^/(?P<nick>\w+)$', 'view'),
-        url(r'^/(?P<nick>\w+)/private', 'view', name='profile_private', robots_disallow=True),
+        url(r'^/(?P<nick>\w+)/private', 'view', name='profile_private', robots_allow=False),
         url(r'^/(?P<nick>\w+)/public', 'view', name='profile_public', robots_allow=True),
     )
 
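For orientation, rules like the ones above are what ``url_robots.views.robots_txt`` serializes into the response. Assuming the profile urlconf is included under ``/profile`` and that ``clean_pattern`` (a helper not shown in this diff) rewrites named groups such as ``(?P<nick>\w+)`` into ``*`` wildcards, the generated robots.txt would look roughly like:

    User-agent: *
    Disallow: /profile/*/private*
    Allow: /profile/*/public*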

setup.py

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@
 
 setup(
     name='django-url-robots',
-    version='1.0.4',
+    version='1.1',
     description='Django robots.txt generator',
     long_description=long_description,
     url='http://github.com/dimka665/django-url-robots',

url_robots/tests/urls_profile.py

Lines changed: 1 addition & 1 deletion
@@ -7,6 +7,6 @@
 urlpatterns = patterns('',
     url(r'^s$', 'view', name='profiles', robots_allow=True),
     url(r'^/(?P<nick>\w+)$', 'view'),
-    url(r'^/(?P<nick>\w+)/private', 'view', name='profile_private', robots_disallow=True),
+    url(r'^/(?P<nick>\w+)/private', 'view', name='profile_private', robots_allow=False),
     url(r'^/(?P<nick>\w+)/public', 'view', name='profile_public', robots_allow=True),
 )

url_robots/utils.py

Lines changed: 6 additions & 6 deletions
@@ -10,11 +10,10 @@
 # decorator for django.conf.urls.defaults.url
 def robots_decorator(url_function):
     def url_extended(regex, view, kwargs=None, name=None, prefix='',
-                     robots_allow=None, robots_disallow=None):
+                     robots_allow=None):
         resolver_or_pattern = url_function(regex, view, kwargs=kwargs, name=name, prefix=prefix)
 
         resolver_or_pattern.robots_allow = robots_allow
-        resolver_or_pattern.robots_disallow = robots_disallow
         return resolver_or_pattern
 
     return url_extended

@@ -37,14 +36,15 @@ def create_rule_list(parent_resolver, abs_pattern):
     for resolver in parent_resolver.url_patterns:
         pattern = join_patterns(abs_pattern, resolver.regex.pattern)
 
-        robots_allow = getattr(resolver, 'robots_allow', None)
-        robots_disallow = getattr(resolver, 'robots_disallow', None)
         rule = ''
+        robots_allow = getattr(resolver, 'robots_allow', None)
 
-        if robots_disallow:
-            rule = 'Disallow: '
+        if robots_allow is None:
+            pass
         elif robots_allow:
             rule = 'Allow: '
+        else:
+            rule = 'Disallow: '
 
         if rule:
             path = clean_pattern(pattern)
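
After this change ``robots_allow`` is effectively tri-state: ``None`` (the default) emits no rule, ``True`` emits ``Allow: ``, and ``False`` takes over the ``Disallow: `` case that ``robots_disallow=True`` used to handle. A standalone sketch of that mapping (``rule_prefix`` is a hypothetical name used here for illustration only, not part of url_robots):

    def rule_prefix(robots_allow):
        # None -> no rule; the pattern is omitted from robots.txt entirely
        if robots_allow is None:
            return ''
        # True -> 'Allow: ', False -> 'Disallow: ', mirroring create_rule_list
        return 'Allow: ' if robots_allow else 'Disallow: '

    assert rule_prefix(None) == ''
    assert rule_prefix(True) == 'Allow: '
    assert rule_prefix(False) == 'Disallow: '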
