Skip to content
This repository has been archived by the owner on Apr 25, 2023. It is now read-only.

Commit

Permalink
fix bug in vote time_string
Browse files Browse the repository at this point in the history
  • Loading branch information
OriHoch committed Feb 10, 2016
1 parent 7b18186 commit f6dce59
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 11 deletions.
8 changes: 8 additions & 0 deletions laws/management/commands/scrape_votes.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,3 +50,11 @@ def _create_new_object(self, dataservice_vote):
# if v.full_text_url != None:
# l = Link(title=u'מסמך הצעת החוק באתר הכנסת', url=v.full_text_url, content_type=ContentType.objects.get_for_model(v), object_pk=str(v.id))
# l.save()

def _recreate_object(self, vote_id):
    """Drop the Vote identified by *vote_id* (with its VoteActions) and
    rebuild it from the Knesset dataservice record.

    Returns whatever ``_create_new_object`` returns for the re-fetched
    dataservice vote.
    """
    existing = Vote.objects.get(id=int(vote_id))
    source_id = existing.src_id
    # Fetch the upstream record *before* destroying any local state.
    dataservice_vote = self.DATASERVICE_CLASS.get(source_id)
    # Dependent actions go first, then the vote row itself.
    VoteAction.objects.filter(vote=existing).delete()
    existing.delete()
    return self._create_new_object(dataservice_vote)
2 changes: 1 addition & 1 deletion simple/scrapers/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import locale


def hebrew_strftime(dt, fmt=u'%A %d %B %Y %H:%M'):
    """Format *dt* using Hebrew locale names for weekday/month.

    The default format is: weekday name, day of month, month name,
    year, HH:MM.  (``%d`` is the fix this commit introduces — the old
    default used ``%m``, which printed the month *number* instead of
    the day of the month.)

    Requires the ``he_IL.utf8`` locale to be installed on the host.
    """
    # NOTE(review): setlocale mutates process-wide state and is not
    # thread-safe; acceptable here since this runs in management
    # commands, but worth confirming no concurrent callers exist.
    locale.setlocale(locale.LC_ALL, 'he_IL.utf8')
    # Python 2: strftime returns a locale-encoded byte string, so
    # decode it to unicode before returning.
    return dt.strftime(fmt).decode('utf8')
28 changes: 18 additions & 10 deletions simple/scrapers/management.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ def _has_existing_object(self, dataservice_object):
def _create_new_object(self, dataservice_object):
    """Subclass hook: build and return a local object from *dataservice_object*."""
    raise NotImplementedError()

def _recreate_object(self, recreate_param):
    """Subclass hook: delete the item identified by *recreate_param* and scrape it again."""
    raise NotImplementedError()


class ReachedMaxItemsException(Exception):
Expand All @@ -24,6 +26,7 @@ class BaseKnessetDataserviceCollectionCommand(BaseKnessetDataserviceCommand):
option_list = BaseKnessetDataserviceCommand.option_list + (
    # --page-range: inclusive "first-last" span of dataservice pages to scrape.
    make_option('--page-range', dest='pagerange', default='1-10', help="range of page number to scrape (e.g. --page-range=5-12), default is 1-10"),
    # --max-items: cap on items processed (presumably '0' means no limit — verify in _handle_page).
    make_option('--max-items', dest='maxitems', default='0', help='maximum number of items to process'),
    # --re-create: id of a single item to delete and scrape again instead of a page run.
    make_option('--re-create', dest='recreate', default='', help='item id to delete and then re-create'),
)

def _handle_page(self, page_num):
Expand All @@ -37,13 +40,18 @@ def _handle_page(self, page_num):
raise ReachedMaxItemsException('reached maxitems')

def _handle_noargs(self, **options):
    """Entry point for the management command.

    Two modes, selected by the ``--re-create`` option:

    * recreate mode — when ``options['recreate']`` is non-empty, delete
      and re-scrape that single item via ``_recreate_object`` and log
      the new object's pk;
    * page-scrape mode (default) — walk the inclusive page range from
      ``--page-range``, handling each page until ``_handle_page``
      signals the ``--max-items`` cap via ReachedMaxItemsException.
    """
    if options['recreate'] != '':
        self._log_info('recreating object %s' % options['recreate'])
        vote = self._recreate_object(options['recreate'])
        self._log_info('created as object %s' % vote.pk)
    else:
        # "first-last", e.g. "5-12"; both ends inclusive.
        page_range = options['pagerange']
        first, last = map(int, page_range.split('-'))
        self._max_items = int(options['maxitems'])
        self._num_items = 0
        for page_num in range(first, last + 1):
            self._log_debug('page %s' % page_num)
            try:
                self._handle_page(page_num)
            except ReachedMaxItemsException:
                # Item cap reached — stop paging.
                break

0 comments on commit f6dce59

Please sign in to comment.