Merge pull request #27 from experius/feature/PWAI-460
[FEATURE][PWAI-546] Multiple stores
lewisvoncken authored Jul 19, 2022
2 parents da973bf + f87bf32 commit 324c9f7
Showing 4 changed files with 35 additions and 48 deletions.
seosnap/urls.py: 9 additions & 7 deletions

@@ -28,6 +28,7 @@
 
 website_cache_redo_tags = views.RedoPageCache.as_view({'post': 'cache_redo_tag'})
 website_cache_redo_website = views.RedoPageCache.as_view({'post': 'cache_redo_website'})
+multiple_website_cache_redo_website = views.RedoPageCache.as_view({'post': 'multiple_cache_redo_website'})
 website_cache_redo_pages = views.RedoPageCache.as_view({'post': 'cache_redo_addresses'})
 
 # URLs for multiple sites
@@ -40,7 +41,7 @@
     path('websites/<int:pk>', website_detail, name='websites-retrieve'),
     path('websites/<int:website_id>/reporting', website_reporting_failure, name='websites-reporting'),
 
-    path('websites/<int:website_id>/log', website_report, name='websites-report-loging'),
+    path('websites/log', website_report, name='websites-report-loging'),
 
     path('websites/<int:website_id>/pages', website_pages, name='websites-pages-list'),
     path('websites/<int:website_id>/pages/count', website_pages_count, name='websites-pages-count'),
@@ -53,22 +54,23 @@
     path('websites/<int:website_id>/queue/progression', website_queue_progression, name='websites-queue-list-progression'),
     path('websites/<int:website_id>/queue/todo/count', website_queue_todo_count, name='websites-queue-todo-count'),
     path('websites/<int:website_id>/queue/redo/old', website_queue_redo_old, name='websites-queue-redo-old'),
-    path('websites/<int:website_id>/queue/<int:queue_item_id>/priority', website_queue_priority_update, name='websites-queue-priority-update'),
-    path('websites/<int:website_id>/queue/items/priority', website_queue_items_priority_update, name='websites-queue-items-priority-update'),
+    path('queue/<int:queue_item_id>/priority', website_queue_priority_update, name='websites-queue-priority-update'),
+    path('queue/items/priority', website_queue_items_priority_update, name='websites-queue-items-priority-update'),
 
     path('websites/<int:website_id>/queue/update', website_queue_update, name='websites-queue-update'),
 
     path('websites/<int:website_id>/queue/clean', website_queue_clean, name='websites-queue-clean'),
-    path('websites/<int:website_id>/queue/<int:queue_item_id>/delete', website_queue_delete_item, name='websites-queue-delete-item'),
-    path('websites/<int:website_id>/queue/items/delete', website_queue_delete_multiple_items, name='websites-multiple-queue-delete-items'),
+    path('queue/<int:queue_item_id>/delete', website_queue_delete_item, name='websites-queue-delete-item'),
+    path('queue/items/delete', website_queue_delete_multiple_items, name='websites-multiple-queue-delete-items'),
 
     path('websites/<int:website_id>/cache/redo/tags', website_cache_redo_tags, name='websites-cache-redo-tags'),
-    path('websites/<int:website_id>/cache/redo/website', website_cache_redo_website, name='websites-cache-redo-website'),
+    path('cache/redo/website', website_cache_redo_website, name='websites-cache-redo-website'),
+    path('multiple/cache/redo/website', multiple_website_cache_redo_website, name='multiple-websites-cache-redo-website'),
    path('websites/<int:website_id>/pages/redo', website_cache_redo_pages, name='websites-cache-redo-addresses'),
 
     # TODO Multiple get
     path('pages', pages, name='pages-list'),
     path('queues', queues, name='queues-list'),
-    # path('cache/redo/tags', website_pages, name='websites-pages-list'),
+    path('cache/redo/tags', cache_redo_tags, name='websites-pages-list'),
 
 ]
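
Note: the urls.py changes above move the queue and cache mutation routes out of the per-website prefix and add a multiple/cache/redo/website route, so one call can span stores. A minimal client sketch, assuming a local host, an /api/v1 prefix, and payload shapes inferred from the view code below (the host and prefix are placeholders, not part of this diff):

import requests

BASE = "http://localhost:8000/api/v1"  # hypothetical host and version prefix

# cache_redo_website reads 'pageId' and 'priority' from the POST body.
requests.post(f"{BASE}/cache/redo/website", json={"pageId": 42, "priority": 1})

# multiple_cache_redo_website queues every page id found in the payload's
# values, so the keys are arbitrary labels.
requests.post(f"{BASE}/multiple/cache/redo/website", json={"0": 42, "1": 108})

# items_update_priority bumps the listed queue items to priority 10.
requests.post(f"{BASE}/queue/items/priority", json={"0": 7, "1": 8})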
seosnap/views/page.py: 13 additions & 28 deletions

@@ -59,13 +59,9 @@ def _multiThreadedPageSync(self, website_id, website, urlsData, doCacheAgain):
         for urlData in urlsData:
             urlsList.add(urlData['loc'])
 
-        existingPages = list(
-            Page.objects.filter(website_id=website_id).filter(address__in=urlsList).values_list('address',
-                                                                                                'updated_at'))
+        existingPages = Page.objects.filter(website_id=website_id).filter(address__in=urlsList)
         addresses = Page.objects.values_list('address', flat=True)
 
-        print(addresses)
-
         for urlData in urlsData:
 
             if urlData['loc'] in addresses:
@@ -75,15 +71,14 @@ def _multiThreadedPageSync(self, website_id, website, urlsData, doCacheAgain):
                 # print(type(existingPages))
                 for page in existingPages:
 
-                    if page[0] == urlData['loc'] and urlData['lastmod'] is not None:
+                    if page.address == urlData['loc'] and urlData['lastmod'] is not None:
                         # if last mod is longer than X min ago
                         # Or later than page updated at
                         # [1] => updated_at
 
-                        sitemapDiff = page[1] - urlData['lastmod']
-                        rendertronDiff = page[1] - doCacheAgain
+                        sitemapDiff = page.updated_at - urlData['lastmod']
+                        rendertronDiff = page.updated_at - doCacheAgain
                         if (sitemapDiff.total_seconds() <= 0) or (rendertronDiff.total_seconds() <= 0):
-                            print("do queue again")
 
                             queue_item_found = QueueItem.objects.filter(page=page).filter(status="unscheduled").first()
                             if queue_item_found is None:
@@ -272,18 +267,15 @@ def cache_redo_tag(self, request, version, website_id=None):
         return HttpResponse(status=200)
 
     @decorators.action(detail=True, methods=['post'])
-    def cache_redo_website(self, request, version, website_id=None):
-        print(" --- start request ---")
-
-        website: Website = Website.objects.filter(id=website_id).first()
+    def cache_redo_website(self, request, version):
         if request.data['pageId']:
             prio = 10000
             if request.data['priority']:
                 prio = 1
 
-            page: Page = website.pages.filter(id=request.data['pageId'])
+            page: Page = Page.objects.filter(id=request.data['pageId'])
 
-            queue_item: QueueItem = QueueItem(page=page[0], website=website, priority=prio)
+            queue_item: QueueItem = QueueItem(page=page[0], website=page[0].website, priority=prio)
             queue_item.save()
 
             data = serialize("json", [queue_item], fields=('page', 'website', 'status', 'priority', 'created_at'))
@@ -293,22 +285,15 @@ def cache_redo_website(self, request, version, website_id=None):
         return Response([''])
 
     @decorators.action(detail=True, methods=['post'])
-    def cache_redo_addresses(self, request, version, website_id=None):
-        createQueueObjects = []
-        website: Website = Website.objects.filter(id=website_id).first()
-
-        if request.data:
-            recachePages = Page.objects.filter(website_id=website_id).filter(address__in=request.data.values())
+    def multiple_cache_redo_website(self, request, version):
+        pages = Page.objects.filter(id__in=request.data.values())
 
-            for page in recachePages:
-                queue_item: QueueItem = QueueItem(page=page, website=website, priority=10000)
-                createQueueObjects.append(queue_item)
-
-            QueueItem.objects.bulk_create(createQueueObjects)
+        for page in pages:
+            queue_item: QueueItem = QueueItem(page=page, website=page.website, priority=100)
+            queue_item.save()
 
-            return HttpResponse(status=200)
+        return HttpResponse(status=200)
 
-        return HttpResponse(status=404)
 
 
 class Pages(viewsets.ViewSet, PageNumberPagination):
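
One behavioural difference worth noting: the removed cache_redo_addresses batched its inserts with bulk_create, while the new multiple_cache_redo_website saves each QueueItem individually, one INSERT per page. A hypothetical variant that keeps the new id-based lookup but restores the single bulk insert (the import path is assumed from the repo layout):

from seosnap.models import Page, QueueItem  # assumed import path

def bulk_queue_pages(page_ids, priority=100):
    # select_related avoids one extra query per page when reading page.website
    pages = Page.objects.filter(id__in=page_ids).select_related('website')
    QueueItem.objects.bulk_create(
        QueueItem(page=page, website=page.website, priority=priority)
        for page in pages
    )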
seosnap/views/queue.py: 6 additions & 10 deletions

@@ -24,7 +24,6 @@ def redo_old(self, request, version, website_id=None):
             .filter(updated_at__lte=doCacheAgain.date())\
             .update(status='unscheduled')
 
-        # TODO run this command + run completed queue clean command
         return HttpResponse(status=200)
 
     @decorators.action(detail=True, methods=['get'])
@@ -35,7 +34,7 @@ def queue(self, request, version, website_id=None):
 
         data = website.queue_items.filter(status='unscheduled') \
             .order_by('priority', '-created_at') \
-            .all()[:50]
+            .all()[:25]
 
         with transaction.atomic():
             for item in data:
@@ -91,10 +90,9 @@ class QueueWebsiteUpdate(viewsets.ViewSet):
         ])
 
     @decorators.action(detail=True, methods=['put'])
-    def update_priority(self, request, version, website_id=None, queue_item_id=None):
+    def update_priority(self, request, version, queue_item_id=None):
         print("im hereee")
         item = QueueItem.objects \
-            .filter(website_id=website_id) \
             .filter(id=queue_item_id) \
             .first()
 
@@ -107,9 +105,8 @@ def update_priority(self, request, version, website_id=None, queue_item_id=None):
         return HttpResponse(status=200)
 
     @decorators.action(detail=True, methods=['post'])
-    def items_update_priority(self, request, version, website_id=None):
+    def items_update_priority(self, request, version):
         QueueItem.objects \
-            .filter(website_id=website_id) \
             .filter(id__in=request.data.values()) \
             .update(priority=10)
 
@@ -155,17 +152,16 @@ def clean_queue(self, request, version, website_id=None):
         return Response([])
 
     @decorators.action(detail=True, methods=['delete'])
-    def delete_queue_item(self, request, version, website_id=None, queue_item_id=None):
+    def delete_queue_item(self, request, version, queue_item_id=None):
         QueueItem.objects \
-            .filter(website_id=website_id) \
             .filter(id=queue_item_id) \
             .delete()
 
         return HttpResponse(status=200)
 
     @decorators.action(detail=True, methods=['post'])
-    def delete_multiple_queue_items(self, request, version, website_id=None):
-        QueueItem.objects.filter(website_id=website_id).filter(id__in=request.data.values()).delete()
+    def delete_multiple_queue_items(self, request, version):
+        QueueItem.objects.filter(id__in=request.data.values()).delete()
 
         return HttpResponse(status=200)
 
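
For context on the queue() hunk: it now hands out the 25 highest-priority unscheduled items (down from 50) and mutates them inside one transaction. The loop body is elided above, so the status flip in this sketch is an assumption, not the committed code. Note also that update_priority, items_update_priority and both delete actions now filter on id alone, so any queue item id in the payload is affected regardless of which website owns it.

from django.db import transaction

def claim_batch(website, size=25):
    # Mirrors the committed query: ascending priority value, newest first on ties.
    items = website.queue_items.filter(status='unscheduled') \
        .order_by('priority', '-created_at') \
        .all()[:size]
    with transaction.atomic():
        for item in items:
            item.status = 'scheduled'  # assumed; the diff hides the loop body
            item.save()
    return items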
seosnap/views/website.py: 7 additions & 3 deletions

@@ -24,16 +24,20 @@ class WebsiteViewSet(viewsets.ModelViewSet):
 
 class WebsiteReport(viewsets.ViewSet):
     @decorators.action(detail=True, methods=['get'])
-    def get_logging(self, request, version, website_id=None):
+    def get_logging(self, request, version):
+        website_ids = []
+        if request.query_params.getlist('website_ids'):
+            website_ids = request.query_params.getlist('website_ids')
+
         one_hour_ago = timezone.now() - timedelta(hours=1)
         lastHourUpdated = Page.objects \
-            .filter(website_id=website_id) \
+            .filter(website_id__in=website_ids) \
             .filter(cache_status='cached') \
             .filter(updated_at__gte=one_hour_ago) \
             .count()
 
         pages = Page.objects \
-            .filter(website_id=website_id) \
+            .filter(website_id__in=website_ids) \
             .filter(cache_status='cached') \
             .order_by('-updated_at')[:50]
         pageSerializer = PageSerializer(pages, many=True)
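
get_logging now reads website_ids as a repeated query parameter via request.query_params.getlist(), matching the new unprefixed websites/log route. A hedged client example (host and API prefix are placeholders):

import requests

resp = requests.get(
    "http://localhost:8000/api/v1/websites/log",      # placeholder host/prefix
    params=[("website_ids", 1), ("website_ids", 2)],  # repeated parameter
)

When the parameter is omitted, website_ids stays an empty list, so both filter(website_id__in=website_ids) querysets match no rows and the report comes back empty rather than covering all sites.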
